diff --git a/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala b/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala index 6b6d7356..c54f81f8 100644 --- a/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala +++ b/zio-json/shared/src/main/scala-2.x/zio/json/macros.scala @@ -4,7 +4,7 @@ import magnolia1._ import zio.Chunk import zio.json.JsonDecoder.JsonError import zio.json.ast.Json -import zio.json.internal.{ FastStringWrite, FieldEncoder, Lexer, RecordingReader, RetractReader, StringMatrix, Write } +import zio.json.internal.{ FieldEncoder, Lexer, RecordingReader, RetractReader, StringMatrix, Write } import scala.annotation._ import scala.language.experimental.macros @@ -219,61 +219,50 @@ object DeriveJsonDecoder { type Typeclass[A] = JsonDecoder[A] def join[A](ctx: CaseClass[JsonDecoder, A])(implicit config: JsonCodecConfiguration): JsonDecoder[A] = { - val (transformNames, nameTransform): (Boolean, String => String) = - ctx.annotations.collectFirst { case jsonMemberNames(format) => format } - .orElse(Some(config.fieldNameMapping)) - .filter(_ != IdentityFormat) - .map(true -> _) - .getOrElse(false -> identity _) - + val nameTransform = + ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping) val no_extra = ctx.annotations.collectFirst { case _: jsonNoExtraFields => () }.isDefined || !config.allowExtraFields - - if (ctx.parameters.isEmpty) - new CaseObjectDecoder(ctx, no_extra) - else - new CollectionJsonDecoder[A] { - private[this] val (names, aliases): (Array[String], Array[(String, Int)]) = { - val names = new Array[String](ctx.parameters.size) - val aliasesBuilder = Array.newBuilder[(String, Int)] - ctx.parameters.foreach { - var idx = 0 - p => - names(idx) = p.annotations.collectFirst { case jsonField(name) => name } - .getOrElse(if (transformNames) nameTransform(p.label) else p.label) - aliasesBuilder ++= p.annotations.flatMap { - case jsonAliases(alias, aliases @ _*) => (alias +: 
aliases).map(_ -> idx) - case _ => Seq.empty - } - idx += 1 - } - val aliases = aliasesBuilder.result() - val allFieldNames = names ++ aliases.map(_._1) - if (allFieldNames.length != allFieldNames.distinct.length) { - val aliasNames = aliases.map(_._1) - val collisions = aliasNames - .filter(alias => names.contains(alias) || aliases.count(a => a._1 == alias) > 1) - .distinct - val msg = s"Field names and aliases in case class ${ctx.typeName.full} must be distinct, " + - s"alias(es) ${collisions.mkString(",")} collide with a field or another alias" - throw new AssertionError(msg) - } - (names, aliases) + if (ctx.parameters.isEmpty) new CaseObjectDecoder(ctx, no_extra) + else { + val (names, aliases): (Array[String], Array[(String, Int)]) = { + val names = new Array[String](ctx.parameters.size) + val aliasesBuilder = Array.newBuilder[(String, Int)] + ctx.parameters.foreach { + var idx = 0 + p => + names(idx) = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label)) + aliasesBuilder ++= p.annotations.flatMap { + case jsonAliases(alias, aliases @ _*) => (alias +: aliases).map(_ -> idx) + case _ => Seq.empty + } + idx += 1 + } + val aliases = aliasesBuilder.result() + val allFieldNames = names ++ aliases.map(_._1) + if (allFieldNames.length != allFieldNames.distinct.length) { + val aliasNames = aliases.map(_._1) + val collisions = aliasNames + .filter(alias => names.contains(alias) || aliases.count(a => a._1 == alias) > 1) + .distinct + val msg = s"Field names and aliases in case class ${ctx.typeName.full} must be distinct, " + + s"alias(es) ${collisions.mkString(",")} collide with a field or another alias" + throw new AssertionError(msg) } - private[this] val len = names.length - private[this] val matrix = new StringMatrix(names, aliases) - private[this] val spans = names.map(JsonError.ObjectAccess) - private[this] val defaults = ctx.parameters.map(_.evaluateDefault.orNull).toArray - private[this] lazy val tcs = - 
ctx.parameters.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]] + (names, aliases) + } + new CollectionJsonDecoder[A] { + private[this] val len = names.length + private[this] val matrix = new StringMatrix(names, aliases) + private[this] val spans = names.map(JsonError.ObjectAccess) + private[this] val defaults = ctx.parameters.map(_.evaluateDefault.orNull).toArray + private[this] lazy val tcs = ctx.parameters.map(_.typeclass).toArray.asInstanceOf[Array[JsonDecoder[Any]]] private[this] lazy val namesMap = (names.zipWithIndex ++ aliases).toMap - private[this] val explicitEmptyCollections = ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.decoding }.getOrElse(config.explicitEmptyCollections.decoding) - private[this] val missingValueDecoder = if (explicitEmptyCollections) { lazy val missingValueDecoders = tcs.map { d => @@ -317,31 +306,33 @@ object DeriveJsonDecoder { Lexer.char(trace, in, '{') // TODO it would be more efficient to have a solution that didn't box - // primitives, but Magnolia does not expose an API for that. Adding + // primitives, but Magnolia does not ealiasesxpose an API for that. Adding // such a feature to Magnolia is the only way to avoid this, e.g. a // ctx.createMutableCons that specialises on the types (with some way // of noting that things have been initialised), which can be called // to instantiate the case class. Would also require JsonDecoder to be // specialised. 
val ps = new Array[Any](len) - if (Lexer.firstField(trace, in)) + if (Lexer.firstField(trace, in)) { do { val idx = Lexer.field(trace, in, matrix) - if (idx != -1) { - if (ps(idx) != null) Lexer.error("duplicate", trace) - val default = defaults(idx) - ps(idx) = - if ( - (default eq null) || in.nextNonWhitespace() != 'n' && { - in.retract() - true - } - ) tcs(idx).unsafeDecode(spans(idx) :: trace, in) - else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default() - else Lexer.error("expected 'null'", spans(idx) :: trace) + if (idx >= 0) { + if (ps(idx) == null) { + val default = defaults(idx) + ps(idx) = + if ( + (default eq null) || in.nextNonWhitespace() != 'n' && { + in.retract() + true + } + ) tcs(idx).unsafeDecode(spans(idx) :: trace, in) + else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default() + else Lexer.error("expected 'null'", spans(idx) :: trace) + } else Lexer.error("duplicate", trace) } else if (no_extra) Lexer.error("invalid extra field", trace) else Lexer.skipValue(trace, in) } while (Lexer.nextField(trace, in)) + } var idx = 0 while (idx < len) { if (ps(idx) == null) { @@ -385,6 +376,7 @@ object DeriveJsonDecoder { case _ => Lexer.error("expected object", trace) } } + } } def split[A](ctx: SealedTrait[JsonDecoder, A])(implicit config: JsonCodecConfiguration): JsonDecoder[A] = { @@ -411,7 +403,7 @@ object DeriveJsonDecoder { new JsonDecoder[A] { def unsafeDecode(trace: List[JsonError], in: RetractReader): A = { val idx = Lexer.enumeration(trace, in, matrix) - if (idx != -1) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil) + if (idx >= 0) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil) else Lexer.error("invalid enumeration value", trace) } @@ -434,7 +426,7 @@ object DeriveJsonDecoder { Lexer.char(trace, in, '{') if (Lexer.firstField(trace, in)) { val idx = Lexer.field(trace, in, matrix) - if (idx != -1) { + if (idx >= 0) { val a = 
tcs(idx).unsafeDecode(spans(idx) :: trace, in).asInstanceOf[A] Lexer.char(trace, in, '}') a @@ -464,9 +456,9 @@ object DeriveJsonDecoder { Lexer.char(trace, in_, '{') if (Lexer.firstField(trace, in_)) { do { - if (Lexer.field(trace, in_, hintmatrix) != -1) { + if (Lexer.field(trace, in_, hintmatrix) >= 0) { val idx = Lexer.enumeration(trace, in_, matrix) - if (idx != -1) { + if (idx >= 0) { in_.rewind() return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A] } else Lexer.error("invalid disambiguator", trace) @@ -511,31 +503,17 @@ object DeriveJsonEncoder { type Typeclass[A] = JsonEncoder[A] def join[A](ctx: CaseClass[JsonEncoder, A])(implicit config: JsonCodecConfiguration): JsonEncoder[A] = - if (ctx.parameters.isEmpty) - caseObjectEncoder.narrow[A] - else + if (ctx.parameters.isEmpty) caseObjectEncoder.narrow[A] + else { + val nameTransform = + ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping) + val params = ctx.parameters.filter(p => p.annotations.collectFirst { case _: jsonExclude => () }.isEmpty).toArray + val explicitNulls = config.explicitNulls || ctx.annotations.exists(_.isInstanceOf[jsonExplicitNull]) + val explicitEmptyCollections = ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.encoding } + .getOrElse(config.explicitEmptyCollections.encoding) new JsonEncoder[A] { - private[this] val (transformNames, nameTransform): (Boolean, String => String) = - ctx.annotations.collectFirst { case jsonMemberNames(format) => format } - .orElse(Some(config.fieldNameMapping)) - .filter(_ != IdentityFormat) - .map(true -> _) - .getOrElse(false -> identity) - private[this] val params = ctx.parameters - .filter(p => p.annotations.collectFirst { case _: jsonExclude => () }.isEmpty) - .toArray - - private[this] val explicitNulls = - config.explicitNulls || ctx.annotations.exists(_.isInstanceOf[jsonExplicitNull]) - private[this] val explicitEmptyCollections = - 
ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections => - a.encoding - }.getOrElse(config.explicitEmptyCollections.encoding) - private[this] lazy val fields: Array[FieldEncoder[Any, Param[JsonEncoder, A]]] = params.map { p => - val name = p.annotations.collectFirst { case jsonField(name) => - name - }.getOrElse(if (transformNames) nameTransform(p.label) else p.label) + val name = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label)) val withExplicitNulls = explicitNulls || p.annotations.exists(_.isInstanceOf[jsonExplicitNull]) val withExplicitEmptyCollections = p.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.encoding @@ -562,27 +540,28 @@ object DeriveJsonEncoder { var idx = 0 var prevFields = false // whether any fields have been written while (idx < fields.length) { - val field = fields(idx) - val p = field.p.dereference(a) + val field = fields(idx) + idx += 1 val encoder = field.encoder + val p = field.p.dereference(a) if ({ (field.flags: @switch) match { - case 0 => !encoder.isEmpty(p) && !encoder.isNothing(p) - case 1 => !encoder.isNothing(p) - case 2 => !encoder.isEmpty(p) - case _ => true + case 0 => encoder.isEmpty(p) || encoder.isNothing(p) + case 1 => encoder.isNothing(p) + case 2 => encoder.isEmpty(p) + case _ => false } - }) { + }) () + else { if (prevFields) { out.write(',') JsonEncoder.pad(indent_, out) } else prevFields = true - JsonEncoder.string.unsafeEncode(field.name, indent_, out) + out.write(field.encodedName) if (indent.isEmpty) out.write(':') else out.write(" : ") encoder.unsafeEncode(p, indent_, out) } - idx += 1 } JsonEncoder.pad(indent, out) out.write('}') @@ -603,6 +582,7 @@ object DeriveJsonEncoder { } .map(Json.Obj.apply) } + } def split[A](ctx: SealedTrait[JsonEncoder, A])(implicit config: JsonCodecConfiguration): JsonEncoder[A] = { val jsonHintFormat: JsonMemberFormat = @@ -610,12 +590,8 @@ object DeriveJsonEncoder { val names: Array[String] = 
ctx.subtypes.map { p => p.annotations.collectFirst { case jsonHint(name) => name }.getOrElse(jsonHintFormat(p.typeName.short)) }.toArray - val encodedNames: Array[String] = names.map { name => - val out = new FastStringWrite(64) - JsonEncoder.string.unsafeEncode(name, None, out) - out.toString - } - lazy val tcs = ctx.subtypes.map(_.typeclass).toArray.asInstanceOf[Array[JsonEncoder[Any]]] + val encodedNames: Array[String] = names.map(name => JsonEncoder.string.encodeJson(name, None).toString) + lazy val tcs = ctx.subtypes.map(_.typeclass).toArray.asInstanceOf[Array[JsonEncoder[Any]]] val discrim = ctx.annotations.collectFirst { case jsonDiscriminator(n) => n }.orElse(config.sumTypeHandling.discriminatorField) lazy val isEnumeration = config.enumValuesAsStrings && @@ -626,24 +602,14 @@ object DeriveJsonEncoder { def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = { var idx = 0 - while (idx < casts.length) { - if (casts(idx).isDefinedAt(a)) { - out.write(encodedNames(idx)) - return - } - idx += 1 - } + while (!casts(idx).isDefinedAt(a)) idx += 1 + out.write(encodedNames(idx)) } override final def toJsonAST(a: A): Either[String, Json] = { var idx = 0 - while (idx < casts.length) { - if (casts(idx).isDefinedAt(a)) { - return new Right(new Json.Str(names(idx))) - } - idx += 1 - } - throw new IllegalArgumentException // shodn't be reached + while (!casts(idx).isDefinedAt(a)) idx += 1 + new Right(new Json.Str(names(idx))) } } } else if (discrim.isEmpty) { @@ -652,84 +618,54 @@ object DeriveJsonEncoder { def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - out.write('{') - val indent_ = JsonEncoder.bump(indent) - JsonEncoder.pad(indent_, out) - out.write(encodedNames(idx)) - if (indent.isEmpty) out.write(':') - else out.write(" : ") - tcs(idx).unsafeEncode(cast(a), indent_, out) - JsonEncoder.pad(indent, out) - out.write('}') - return - } - idx += 1 - 
} + while (!casts(idx).isDefinedAt(a)) idx += 1 + out.write('{') + val indent_ = JsonEncoder.bump(indent) + JsonEncoder.pad(indent_, out) + out.write(encodedNames(idx)) + if (indent.isEmpty) out.write(':') + else out.write(" : ") + tcs(idx).unsafeEncode(casts(idx)(a), indent_, out) + JsonEncoder.pad(indent, out) + out.write('}') } override def toJsonAST(a: A): Either[String, Json] = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - return tcs(idx).toJsonAST(cast(a)).map { inner => - new Json.Obj(Chunk(names(idx) -> inner)) - } - } - idx += 1 - } - throw new IllegalArgumentException // shodn't be reached + while (!casts(idx).isDefinedAt(a)) idx += 1 + tcs(idx).toJsonAST(casts(idx)(a)).map(inner => new Json.Obj(Chunk(names(idx) -> inner))) } } } else { new JsonEncoder[A] { - private[this] val casts = ctx.subtypes.map(_.cast).toArray - private[this] val hintFieldName = discrim.get - private[this] val encodedHintFieldName = { - val out = new FastStringWrite(64) - JsonEncoder.string.unsafeEncode(hintFieldName, None, out) - out.toString - } + private[this] val casts = ctx.subtypes.map(_.cast).toArray + private[this] val hintFieldName = discrim.get + private[this] val encodedHintFieldName = JsonEncoder.string.encodeJson(hintFieldName, None).toString def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - out.write('{') - val indent_ = JsonEncoder.bump(indent) - JsonEncoder.pad(indent_, out) - out.write(encodedHintFieldName) - if (indent.isEmpty) out.write(':') - else out.write(" : ") - out.write(encodedNames(idx)) - // whitespace is always off by 2 spaces at the end, probably not worth fixing - tcs(idx).unsafeEncode(cast(a), indent, new NestedWriter(out, indent_)) - return - } - idx += 1 - } + while (!casts(idx).isDefinedAt(a)) idx += 1 + out.write('{') + val indent_ = JsonEncoder.bump(indent) + 
JsonEncoder.pad(indent_, out) + out.write(encodedHintFieldName) + if (indent.isEmpty) out.write(':') + else out.write(" : ") + out.write(encodedNames(idx)) + // whitespace is always off by 2 spaces at the end, probably not worth fixing + tcs(idx).unsafeEncode(casts(idx)(a), indent, new NestedWriter(out, indent_)) } override final def toJsonAST(a: A): Either[String, Json] = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - return tcs(idx).toJsonAST(cast(a)).flatMap { - case o: Json.Obj => - val hintField = hintFieldName -> new Json.Str(names(idx)) - new Right(new Json.Obj(hintField +: o.fields)) // hint field is always first - case _ => - new Left("expected object") - } - } - idx += 1 + while (!casts(idx).isDefinedAt(a)) idx += 1 + tcs(idx).toJsonAST(casts(idx)(a)).flatMap { + case o: Json.Obj => + val hintField = hintFieldName -> new Json.Str(names(idx)) + new Right(new Json.Obj(hintField +: o.fields)) // hint field is always first + case _ => + new Left("expected object") } - throw new IllegalArgumentException // shodn't be reached } } } diff --git a/zio-json/shared/src/main/scala-3/zio/json/macros.scala b/zio-json/shared/src/main/scala-3/zio/json/macros.scala index b82f1e91..3aad818f 100644 --- a/zio-json/shared/src/main/scala-3/zio/json/macros.scala +++ b/zio-json/shared/src/main/scala-3/zio/json/macros.scala @@ -8,7 +8,7 @@ import scala.reflect.* import zio.Chunk import zio.json.JsonDecoder.JsonError import zio.json.ast.Json -import zio.json.internal.{ FastStringWrite, FieldEncoder, Lexer, RecordingReader, RetractReader, StringMatrix, Write } +import zio.json.internal.{ FieldEncoder, Lexer, RecordingReader, RetractReader, StringMatrix, Write } import scala.annotation._ import scala.collection.Factory @@ -229,52 +229,40 @@ private class CaseObjectDecoder[Typeclass[*], A](val ctx: CaseClass[Typeclass, A sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Derivation[JsonDecoder] { self => def 
join[A](ctx: CaseClass[Typeclass, A]): JsonDecoder[A] = { - val (transformNames, nameTransform): (Boolean, String => String) = - ctx.annotations.collectFirst { case jsonMemberNames(format) => format } - .orElse(Some(config.fieldNameMapping)) - .filter(_ != IdentityFormat) - .map(true -> _) - .getOrElse(false -> identity) - + val nameTransform: String => String = + ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping) val no_extra = ctx.annotations.collectFirst { case _: jsonNoExtraFields => () }.isDefined || !config.allowExtraFields - - if (ctx.params.isEmpty) { - new CaseObjectDecoder(ctx, no_extra) - } else { - new CollectionJsonDecoder[A] { - private val (names, aliases): (Array[String], Array[(String, Int)]) = { - val names = new Array[String](ctx.params.size) - val aliasesBuilder = Array.newBuilder[(String, Int)] - ctx.params.foreach { - var idx = 0 - p => - names(idx) = p - .annotations - .collectFirst { case jsonField(name) => name } - .getOrElse(if (transformNames) nameTransform(p.label) else p.label) - aliasesBuilder ++= p - .annotations - .flatMap { - case jsonAliases(alias, aliases*) => (alias +: aliases).map(_ -> idx) - case _ => Seq.empty - } - idx += 1 - } - val aliases = aliasesBuilder.result() - val allFieldNames = names ++ aliases.map(_._1) - if (allFieldNames.length != allFieldNames.distinct.length) { - val aliasNames = aliases.map(_._1) - val collisions = aliasNames - .filter(alias => names.contains(alias) || aliases.count { case (a, _) => a == alias } > 1) - .distinct - val msg = s"Field names and aliases in case class ${ctx.typeInfo.full} must be distinct, " + - s"alias(es) ${collisions.mkString(",")} collide with a field or another alias" - throw new AssertionError(msg) - } - (names, aliases) + if (ctx.params.isEmpty) new CaseObjectDecoder(ctx, no_extra) + else { + val (names, aliases): (Array[String], Array[(String, Int)]) = { + val names = new Array[String](ctx.params.size) + val 
aliasesBuilder = Array.newBuilder[(String, Int)] + ctx.params.foreach { + var idx = 0 + p => + names(idx) = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label)) + aliasesBuilder ++= p.annotations.flatMap { + case jsonAliases(alias, aliases*) => (alias +: aliases).map(_ -> idx) + case _ => Seq.empty + } + idx += 1 } + val aliases = aliasesBuilder.result() + val allFieldNames = names ++ aliases.map(_._1) + if (allFieldNames.length != allFieldNames.distinct.length) { + val aliasNames = aliases.map(_._1) + val collisions = aliasNames + .filter(alias => names.contains(alias) || aliases.count { case (a, _) => a == alias } > 1) + .distinct + val msg = s"Field names and aliases in case class ${ctx.typeInfo.full} must be distinct, " + + s"alias(es) ${collisions.mkString(",")} collide with a field or another alias" + throw new AssertionError(msg) + } + (names, aliases) + } + new CollectionJsonDecoder[A] { private val len = names.length private val matrix = new StringMatrix(names, aliases) private val spans = names.map(JsonError.ObjectAccess(_)) @@ -282,12 +270,10 @@ sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Deriv private lazy val tcs = IArray.genericWrapArray(ctx.params.map(_.typeclass)).toArray.asInstanceOf[Array[JsonDecoder[Any]]] private lazy val namesMap = (names.zipWithIndex ++ aliases).toMap - private val explicitEmptyCollections = ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.decoding }.getOrElse(config.explicitEmptyCollections.decoding) - private val missingValueDecoder = if (explicitEmptyCollections) { lazy val missingValueDecoders = tcs.map { d => @@ -333,15 +319,16 @@ sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Deriv if (Lexer.firstField(trace, in)) while({ val idx = Lexer.field(trace, in, matrix) - if (idx != -1) { - if (ps(idx) != null) Lexer.error("duplicate", trace) - val default = defaults(idx) - ps(idx) = if ((default eq null) 
|| in.nextNonWhitespace() != 'n' && { - in.retract() - true - }) tcs(idx).unsafeDecode(spans(idx) :: trace, in) - else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default() - else Lexer.error("expected 'null'", spans(idx) :: trace) + if (idx >= 0) { + if (ps(idx) == null) { + val default = defaults(idx) + ps(idx) = if ((default eq null) || in.nextNonWhitespace() != 'n' && { + in.retract() + true + }) tcs(idx).unsafeDecode(spans(idx) :: trace, in) + else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') default() + else Lexer.error("expected 'null'", spans(idx) :: trace) + } else Lexer.error("duplicate", trace) } else if (no_extra) Lexer.error("invalid extra field", trace) else Lexer.skipValue(trace, in) Lexer.nextField(trace, in) @@ -366,11 +353,12 @@ sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Deriv o.fields.foreach { kv => namesMap.get(kv._1) match { case Some(idx) => - if (ps(idx) != null) Lexer.error("duplicate", trace) - val default = defaults(idx) - ps(idx) = - if ((default ne null) && (kv._2 eq Json.Null)) default() - else tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2) + if (ps(idx) == null) { + val default = defaults(idx) + ps(idx) = + if ((default ne null) && (kv._2 eq Json.Null)) default() + else tcs(idx).unsafeFromJsonAST(spans(idx) :: trace, kv._2) + } else Lexer.error("duplicate", trace) case _ => if (no_extra) Lexer.error("invalid extra field", trace) } @@ -416,7 +404,7 @@ sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Deriv new JsonDecoder[A] { def unsafeDecode(trace: List[JsonError], in: RetractReader): A = { val idx = Lexer.enumeration(trace, in, matrix) - if (idx != -1) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil) + if (idx >= 0) tcs(idx).asInstanceOf[CaseObjectDecoder[JsonDecoder, A]].ctx.rawConstruct(Nil) else Lexer.error("invalid enumeration value", trace) } @@ -438,7 +426,7 @@ sealed class 
JsonDecoderDerivation(config: JsonCodecConfiguration) extends Deriv Lexer.char(trace, in, '{') if (Lexer.firstField(trace, in)) { val idx = Lexer.field(trace, in, matrix) - if (idx != -1) { + if (idx >= 0) { val a = tcs(idx).unsafeDecode(spans(idx) :: trace, in).asInstanceOf[A] Lexer.char(trace, in, '}') a @@ -468,11 +456,12 @@ sealed class JsonDecoderDerivation(config: JsonCodecConfiguration) extends Deriv Lexer.char(trace, in_, '{') if (Lexer.firstField(trace, in_)) { while ({ - if (Lexer.field(trace, in_, hintmatrix) != -1) { + if (Lexer.field(trace, in_, hintmatrix) >= 0) { val idx = Lexer.enumeration(trace, in_, matrix) - if (idx == -1) Lexer.error("invalid disambiguator", trace) - in_.rewind() - return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A] + if (idx >= 0) { + in_.rewind() + return tcs(idx).unsafeDecode(spans(idx) :: trace, in_).asInstanceOf[A] + } else Lexer.error("invalid disambiguator", trace) } else Lexer.skipValue(trace, in_) Lexer.nextField(trace, in_) }) () @@ -531,34 +520,20 @@ object DeriveJsonDecoder extends JsonDecoderDerivation(JsonCodecConfiguration.de sealed class JsonEncoderDerivation(config: JsonCodecConfiguration) extends Derivation[JsonEncoder] { self => def join[A](ctx: CaseClass[Typeclass, A]): JsonEncoder[A] = - if (ctx.params.isEmpty) { - caseObjectEncoder.narrow[A] - } else { + if (ctx.params.isEmpty) caseObjectEncoder.narrow[A] + else { new JsonEncoder[A] { - private val (transformNames, nameTransform): (Boolean, String => String) = ctx.annotations - .collectFirst { case jsonMemberNames(format) => format } - .orElse(Some(config.fieldNameMapping)) - .filter(_ != IdentityFormat) - .map(true -> _) - .getOrElse(false -> identity) + private val nameTransform = + ctx.annotations.collectFirst { case jsonMemberNames(format) => format }.getOrElse(config.fieldNameMapping) private val params = IArray.genericWrapArray(ctx.params.filterNot { param => param.annotations.collectFirst { case _: jsonExclude => () }.isDefined 
}).toArray - private val names = params.map { p => - p.annotations.collectFirst { - case jsonField(name) => name - }.getOrElse(if (transformNames) nameTransform(p.label) else p.label) - }.toArray - private val explicitNulls = config.explicitNulls || ctx.annotations.exists(_.isInstanceOf[jsonExplicitNull]) private val explicitEmptyCollections = ctx.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.encoding }.getOrElse(config.explicitEmptyCollections.encoding) - private lazy val fields: Array[FieldEncoder[Any, CaseClass.Param[JsonEncoder, A]]] = params.map { p => - val name = p.annotations.collectFirst { case jsonField(name) => - name - }.getOrElse(if (transformNames) nameTransform(p.label) else p.label) + val name = p.annotations.collectFirst { case jsonField(name) => name }.getOrElse(nameTransform(p.label)) val withExplicitNulls = explicitNulls || p.annotations.exists(_.isInstanceOf[jsonExplicitNull]) val withExplicitEmptyCollections = p.annotations.collectFirst { case a: jsonExplicitEmptyCollections => a.encoding @@ -586,26 +561,27 @@ sealed class JsonEncoderDerivation(config: JsonCodecConfiguration) extends Deriv var prevFields = false while (idx < fields.length) { val field = fields(idx) - val p = field.p.deref(a) + idx += 1 val encoder = field.encoder + val p = field.p.deref(a) if ({ (field.flags: @switch) match { - case 0 => !encoder.isEmpty(p) && !encoder.isNothing(p) - case 1 => !encoder.isNothing(p) - case 2 => !encoder.isEmpty(p) - case _ => true + case 0 => encoder.isEmpty(p) || encoder.isNothing(p) + case 1 => encoder.isNothing(p) + case 2 => encoder.isEmpty(p) + case _ => false } - }) { + }) () + else { if (prevFields) { out.write(',') JsonEncoder.pad(indent_, out) } else prevFields = true - JsonEncoder.string.unsafeEncode(field.name, indent_, out) + out.write(field.encodedName) if (indent.isEmpty) out.write(':') else out.write(" : ") encoder.unsafeEncode(p, indent_, out) } - idx += 1 } JsonEncoder.pad(indent, out) out.write('}') @@ 
-634,11 +610,7 @@ sealed class JsonEncoderDerivation(config: JsonCodecConfiguration) extends Deriv val names: Array[String] = IArray.genericWrapArray(ctx.subtypes.map { p => p.annotations.collectFirst { case jsonHint(name) => name }.getOrElse(jsonHintFormat(p.typeInfo.short)) }).toArray - val encodedNames: Array[String] = names.map { name => - val out = new FastStringWrite(64) - JsonEncoder.string.unsafeEncode(name, None, out) - out.toString - } + val encodedNames: Array[String] = names.map(name => JsonEncoder.string.encodeJson(name, None).toString) lazy val tcs = IArray.genericWrapArray(ctx.subtypes.map(_.typeclass)).toArray.asInstanceOf[Array[JsonEncoder[Any]]] val discrim = @@ -652,24 +624,14 @@ sealed class JsonEncoderDerivation(config: JsonCodecConfiguration) extends Deriv def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = { var idx = 0 - while (idx < casts.length) { - if (casts(idx).isDefinedAt(a)) { - out.write(encodedNames(idx)) - return - } - idx += 1 - } + while (!casts(idx).isDefinedAt(a)) idx += 1 + out.write(encodedNames(idx)) } override final def toJsonAST(a: A): Either[String, Json] = { var idx = 0 - while (idx < casts.length) { - if (casts(idx).isDefinedAt(a)) { - return new Right(new Json.Str(names(idx))) - } - idx += 1 - } - throw new IllegalArgumentException // shodn't be reached + while (!casts(idx).isDefinedAt(a)) idx += 1 + new Right(new Json.Str(names(idx))) } } } else if (discrim.isEmpty) { @@ -678,84 +640,54 @@ sealed class JsonEncoderDerivation(config: JsonCodecConfiguration) extends Deriv def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - out.write('{') - val indent_ = JsonEncoder.bump(indent) - JsonEncoder.pad(indent_, out) - out.write(encodedNames(idx)) - if (indent.isEmpty) out.write(':') - else out.write(" : ") - tcs(idx).unsafeEncode(cast(a), indent_, out) - JsonEncoder.pad(indent, out) - out.write('}') - 
return - } - idx += 1 - } + while (!casts(idx).isDefinedAt(a)) idx += 1 + out.write('{') + val indent_ = JsonEncoder.bump(indent) + JsonEncoder.pad(indent_, out) + out.write(encodedNames(idx)) + if (indent.isEmpty) out.write(':') + else out.write(" : ") + tcs(idx).unsafeEncode(casts(idx)(a), indent_, out) + JsonEncoder.pad(indent, out) + out.write('}') } override def toJsonAST(a: A): Either[String, Json] = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - return tcs(idx).toJsonAST(cast(a)).map { inner => - new Json.Obj(Chunk(names(idx) -> inner)) - } - } - idx += 1 - } - throw new IllegalArgumentException // shodn't be reached + while (!casts(idx).isDefinedAt(a)) idx += 1 + tcs(idx).toJsonAST(casts(idx)(a)).map(inner => new Json.Obj(Chunk(names(idx) -> inner))) } } } else { new JsonEncoder[A] { private val casts = IArray.genericWrapArray(ctx.subtypes.map(_.cast)).toArray private val hintFieldName = discrim.get - private val encodedHintFieldName = { - val out = new FastStringWrite(64) - JsonEncoder.string.unsafeEncode(hintFieldName, None, out) - out.toString - } + private val encodedHintFieldName = JsonEncoder.string.encodeJson(hintFieldName, None).toString def unsafeEncode(a: A, indent: Option[Int], out: Write): Unit = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - out.write('{') - val indent_ = JsonEncoder.bump(indent) - JsonEncoder.pad(indent_, out) - out.write(encodedHintFieldName) - if (indent.isEmpty) out.write(':') - else out.write(" : ") - out.write(encodedNames(idx)) - // whitespace is always off by 2 spaces at the end, probably not worth fixing - tcs(idx).unsafeEncode(cast(a), indent, new DeriveJsonEncoder.NestedWriter(out, indent_)) - return - } - idx += 1 - } + while (!casts(idx).isDefinedAt(a)) idx += 1 + out.write('{') + val indent_ = JsonEncoder.bump(indent) + JsonEncoder.pad(indent_, out) + out.write(encodedHintFieldName) + if (indent.isEmpty) 
out.write(':') + else out.write(" : ") + out.write(encodedNames(idx)) + // whitespace is always off by 2 spaces at the end, probably not worth fixing + tcs(idx).unsafeEncode(casts(idx)(a), indent, new DeriveJsonEncoder.NestedWriter(out, indent_)) } override final def toJsonAST(a: A): Either[String, Json] = { var idx = 0 - while (idx < casts.length) { - val cast = casts(idx) - if (cast.isDefinedAt(a)) { - return tcs(idx).toJsonAST(cast(a)).flatMap { - case o: Json.Obj => - val hintField = hintFieldName -> new Json.Str(names(idx)) - new Right(new Json.Obj(hintField +: o.fields)) // hint field is always first - case _ => - new Left("expected object") - } - } - idx += 1 + while (!casts(idx).isDefinedAt(a)) idx += 1 + tcs(idx).toJsonAST(casts(idx)(a)).flatMap { + case o: Json.Obj => + val hintField = hintFieldName -> new Json.Str(names(idx)) + new Right(new Json.Obj(hintField +: o.fields)) // hint field is always first + case _ => + new Left("expected object") } - throw new IllegalArgumentException // shodn't be reached } } } diff --git a/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala b/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala index e770dc5e..14671299 100644 --- a/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala +++ b/zio-json/shared/src/main/scala/zio/json/JsonDecoder.scala @@ -82,11 +82,11 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] { * Note: This method may not entirely consume the specified character sequence. 
*/ final def decodeJson(str: CharSequence): Either[String, A] = - try Right(unsafeDecode(Nil, new FastStringReader(str))) + try new Right(unsafeDecode(Nil, new FastStringReader(str))) catch { - case JsonDecoder.UnsafeJson(trace) => Left(JsonError.render(trace)) - case _: UnexpectedEnd => Left("Unexpected end of input") - case _: StackOverflowError => Left("Unexpected structure") + case e: JsonDecoder.UnsafeJson => new Left(JsonError.render(e.trace)) + case _: UnexpectedEnd => new Left("Unexpected end of input") + case _: StackOverflowError => new Left("Unexpected structure") } /** @@ -120,7 +120,7 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] { } } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A1 = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): A1 = try self.unsafeFromJsonAST(trace, json) catch { case _: JsonDecoder.UnsafeJson | _: UnexpectedEnd => that.unsafeFromJsonAST(trace, json) @@ -129,7 +129,7 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] { override def unsafeDecodeMissing(trace: List[JsonError]): A1 = try self.unsafeDecodeMissing(trace) catch { - case _: Throwable => that.unsafeDecodeMissing(trace) + case _: JsonDecoder.UnsafeJson | _: UnexpectedEnd => that.unsafeDecodeMissing(trace) } } @@ -149,9 +149,7 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] { def unsafeDecode(trace: List[JsonError], in: RetractReader): B = f(self.unsafeDecode(trace, in)) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): B = f( - self.unsafeFromJsonAST(trace, json) - ) + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): B = f(self.unsafeFromJsonAST(trace, json)) override def unsafeDecodeMissing(trace: List[JsonError]): B = f(self.unsafeDecodeMissing(trace)) } @@ -170,7 +168,7 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] { case Left(err) => Lexer.error(err, trace) } - override final def unsafeFromJsonAST(trace: 
List[JsonError], json: Json): B = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): B = f(self.unsafeFromJsonAST(trace, json)) match { case Right(b) => b case Left(err) => Lexer.error(err, trace) @@ -222,11 +220,11 @@ trait JsonDecoder[A] extends JsonDecoderPlatformSpecific[A] { * more performant implementation. */ final def fromJsonAST(json: Json): Either[String, A] = - try Right(unsafeFromJsonAST(Nil, json)) + try new Right(unsafeFromJsonAST(Nil, json)) catch { - case JsonDecoder.UnsafeJson(trace) => Left(JsonError.render(trace)) - case _: UnexpectedEnd => Left("Unexpected end of input") - case _: StackOverflowError => Left("Unexpected structure") + case e: JsonDecoder.UnsafeJson => new Left(JsonError.render(e.trace)) + case _: UnexpectedEnd => new Left("Unexpected end of input") + case _: StackOverflowError => new Left("Unexpected structure") } } @@ -271,7 +269,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with implicit val string: JsonDecoder[String] = new JsonDecoder[String] { def unsafeDecode(trace: List[JsonError], in: RetractReader): String = Lexer.string(trace, in).toString - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): String = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): String = json match { case s: Json.Str => s.value case _ => Lexer.error("expected string", trace) @@ -281,7 +279,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with implicit val boolean: JsonDecoder[Boolean] = new JsonDecoder[Boolean] { def unsafeDecode(trace: List[JsonError], in: RetractReader): Boolean = Lexer.boolean(trace, in) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Boolean = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Boolean = json match { case b: Json.Bool => b.value case _ => Lexer.error("expected boolean", trace) @@ -291,7 +289,7 @@ object JsonDecoder extends 
GeneratedTupleDecoders with DecoderLowPriority1 with implicit val char: JsonDecoder[Char] = new JsonDecoder[Char] { def unsafeDecode(trace: List[JsonError], in: RetractReader): Char = Lexer.char(trace, in) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Char = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Char = json match { case s: Json.Str if s.value.length == 1 => s.value.charAt(0) case _ => Lexer.error("expected single character string", trace) @@ -312,7 +310,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Byte = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Byte = json match { case n: Json.Num => try n.value.byteValueExact @@ -336,7 +334,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Short = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Short = json match { case n: Json.Num => try n.value.shortValueExact @@ -360,7 +358,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Int = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Int = json match { case n: Json.Num => try n.value.intValueExact @@ -383,7 +381,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Long = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Long = json match { case n: Json.Num => try n.value.longValueExact @@ -407,7 +405,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): 
java.math.BigInteger = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.math.BigInteger = json match { case n: Json.Num => try n.value.toBigIntegerExact @@ -430,7 +428,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): BigInt = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): BigInt = json match { case n: Json.Num => try BigInt(n.value.toBigIntegerExact) @@ -453,7 +451,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Float = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Float = json match { case n: Json.Num => n.value.floatValue case s: Json.Str => Lexer.float(trace, new FastStringReader(s.value)) @@ -472,7 +470,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Double = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Double = json match { case n: Json.Num => n.value.doubleValue case s: Json.Str => Lexer.double(trace, new FastStringReader(s.value)) @@ -491,7 +489,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.math.BigDecimal = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.math.BigDecimal = json match { case n: Json.Num => n.value case s: Json.Str => Lexer.bigDecimal(trace, new FastStringReader(s.value)) @@ -510,7 +508,7 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with a } - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): BigDecimal = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): BigDecimal = json match { 
case n: Json.Num => new BigDecimal(n.value, BigDecimal.defaultMathContext) case s: Json.Str => Lexer.bigDecimal(trace, new FastStringReader(s.value)) @@ -527,19 +525,15 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with override def unsafeDecodeMissing(trace: List[JsonError]): Option[A] = None def unsafeDecode(trace: List[JsonError], in: RetractReader): Option[A] = - if (in.nextNonWhitespace() == 'n') { - if (in.readChar() != 'u' || in.readChar() != 'l' || in.readChar() != 'l') { - Lexer.error("expected 'null'", trace) - } - None - } else { + if (in.nextNonWhitespace() != 'n') { in.retract() new Some(A.unsafeDecode(trace, in)) - } + } else if (in.readChar() == 'u' && in.readChar() == 'l' && in.readChar() == 'l') None + else Lexer.error("expected 'null'", trace) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Option[A] = - if (json eq Json.Null) None - else new Some(A.unsafeFromJsonAST(trace, json)) + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Option[A] = + if (json ne Json.Null) new Some(A.unsafeFromJsonAST(trace, json)) + else None } // supports multiple representations for compatibility with other libraries, @@ -585,14 +579,16 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with builder: mutable.Builder[A, T[A]] )(implicit A: JsonDecoder[A]): T[A] = { val c = in.nextNonWhitespace() - if (c != '[') Lexer.error("'['", c, trace) - var i: Int = 0 - if (Lexer.firstArrayElement(in)) while ({ - builder += A.unsafeDecode(new JsonError.ArrayAccess(i) :: trace, in) - i += 1 - Lexer.nextArrayElement(trace, in) - }) () - builder.result() + if (c == '[') { + var i = 0 + if (Lexer.firstArrayElement(in)) while ({ + builder += A.unsafeDecode(new JsonError.ArrayAccess(i) :: trace, in) + i += 1 + Lexer.nextArrayElement(trace, in) + }) () + return builder.result() + } + Lexer.error("'['", c, trace) } @inline private[json] def keyValueBuilder[K, V, T[X, Y] <: 
Iterable[(X, Y)]]( @@ -601,18 +597,20 @@ object JsonDecoder extends GeneratedTupleDecoders with DecoderLowPriority1 with builder: mutable.Builder[(K, V), T[K, V]] )(implicit K: JsonFieldDecoder[K], V: JsonDecoder[V]): T[K, V] = { var c = in.nextNonWhitespace() - if (c != '{') Lexer.error("'{'", c, trace) - if (Lexer.firstField(trace, in)) - while ({ - val field = Lexer.string(trace, in).toString - val trace_ = new JsonError.ObjectAccess(field) :: trace - c = in.nextNonWhitespace() - if (c != ':') Lexer.error("':'", c, trace) - val value = V.unsafeDecode(trace_, in) - builder += ((K.unsafeDecodeField(trace_, field), value)) - Lexer.nextField(trace, in) - }) () - builder.result() + if (c == '{') { + if (Lexer.firstField(trace, in)) + while ({ + val field = Lexer.string(trace, in).toString + val trace_ = new JsonError.ObjectAccess(field) :: trace + c = in.nextNonWhitespace() + if (c != ':') Lexer.error("':'", c, trace) + val value = V.unsafeDecode(trace_, in) + builder += ((K.unsafeDecodeField(trace_, field), value)) + Lexer.nextField(trace, in) + }) () + return builder.result() + } + Lexer.error("'{'", c, trace) } // FIXME: remove in the next major version @@ -647,29 +645,29 @@ private[json] trait DecoderLowPriority1 extends DecoderLowPriority2 { def unsafeDecode(trace: List[JsonError], in: RetractReader): Array[A] = { val c = in.nextNonWhitespace() - if (c != '[') Lexer.error("'['", c, trace) - if (Lexer.firstArrayElement(in)) { - var l = 8 - var x = new Array[A](l) - var i = 0 - while ({ - if (i == l) { - l <<= 1 - val x1 = new Array[A](l) - System.arraycopy(x, 0, x1, 0, i) - x = x1 - } - x(i) = A.unsafeDecode(new JsonError.ArrayAccess(i) :: trace, in) - i += 1 - Lexer.nextArrayElement(trace, in) - }) () - if (i != l) { + if (c == '[') { + if (Lexer.firstArrayElement(in)) { + var l = 8 + var x = new Array[A](l) + var i = 0 + while ({ + if (i == l) { + l <<= 1 + val x1 = new Array[A](l) + System.arraycopy(x, 0, x1, 0, i) + x = x1 + } + x(i) = A.unsafeDecode(new 
JsonError.ArrayAccess(i) :: trace, in) + i += 1 + Lexer.nextArrayElement(trace, in) + }) () + if (i == l) return x val x1 = new Array[A](i) - _root_.java.lang.System.arraycopy(x, 0, x1, 0, i) - x = x1 - } - x - } else Array.empty + System.arraycopy(x, 0, x1, 0, i) + return x1 + } else return Array.empty + } + Lexer.error("'['", c, trace) } } @@ -690,7 +688,7 @@ private[json] trait DecoderLowPriority1 extends DecoderLowPriority2 { def unsafeDecode(trace: List[JsonError], in: RetractReader): Chunk[A] = builder(trace, in, zio.ChunkBuilder.make[A]()) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): Chunk[A] = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): Chunk[A] = json match { case a: Json.Arr => a.elements.map { @@ -882,7 +880,7 @@ private[json] trait DecoderLowPriority3 extends DecoderLowPriority4 { def unsafeDecode(trace: List[JsonError], in: RetractReader): A = parseJavaTime(trace, Lexer.string(trace, in).toString) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): A = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): A = json match { case s: Json.Str => parseJavaTime(trace, s.value) case _ => Lexer.error("expected string", trace) @@ -912,7 +910,7 @@ private[json] trait DecoderLowPriority3 extends DecoderLowPriority4 { implicit val uuid: JsonDecoder[UUID] = new JsonDecoder[UUID] { def unsafeDecode(trace: List[JsonError], in: RetractReader): UUID = Lexer.uuid(trace, in) - override final def unsafeFromJsonAST(trace: List[JsonError], json: Json): UUID = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): UUID = json match { case s: Json.Str => try UUIDParser.unsafeParse(s.value) @@ -927,7 +925,7 @@ private[json] trait DecoderLowPriority3 extends DecoderLowPriority4 { def unsafeDecode(trace: List[JsonError], in: RetractReader): java.util.Currency = parseCurrency(trace, Lexer.string(trace, in).toString) - override final def unsafeFromJsonAST(trace: 
List[JsonError], json: Json): java.util.Currency = + override def unsafeFromJsonAST(trace: List[JsonError], json: Json): java.util.Currency = json match { case s: Json.Str => parseCurrency(trace, s.value) case _ => Lexer.error("expected string", trace) diff --git a/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala b/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala index 14204da2..dd66311c 100644 --- a/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala +++ b/zio-json/shared/src/main/scala/zio/json/JsonEncoder.scala @@ -136,7 +136,7 @@ object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with write } - def release(): Unit = level -= 1 // decrease the level of recusrion + def release(): Unit = if (level > 0) level -= 1 // decrease the level of recursion } private val writePools = new ThreadLocal[FastStringWritePool] { @@ -162,7 +162,7 @@ object JsonEncoder extends GeneratedTupleEncoders with EncoderLowPriority1 with out.write('"') } - override final def toJsonAST(a: String): Either[String, Json] = new Right(new Json.Str(a)) + @inline override final def toJsonAST(a: String): Either[String, Json] = new Right(new Json.Str(a)) private[this] def writeEncoded(a: String, out: Write): Unit = { val len = a.length diff --git a/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala b/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala index 73dc5034..bad2a4d7 100644 --- a/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala +++ b/zio-json/shared/src/main/scala/zio/json/internal/FieldEncoder.scala @@ -11,19 +11,17 @@ private[json] class FieldEncoder[T, P]( val encoder: JsonEncoder[T], val flags: Int ) { + val encodedName: String = JsonEncoder.string.encodeJson(name, None).toString + def encodeOrDefault(t: T)( encode: () => Either[String, Chunk[(String, Json)]], default: Either[String, Chunk[(String, Json)]] ): Either[String, Chunk[(String, Json)]] = (flags: @switch) match { - case 0 => - if
(!encoder.isEmpty(t) && !encoder.isNothing(t)) encode() else default - case 1 => - if (!encoder.isNothing(t)) encode() else default - case 2 => - if (!encoder.isEmpty(t)) encode() else default - case _ => - encode() + case 0 => if (encoder.isEmpty(t) || encoder.isNothing(t)) default else encode() + case 1 => if (encoder.isNothing(t)) default else encode() + case 2 => if (encoder.isEmpty(t)) default else encode() + case _ => encode() } } @@ -41,8 +39,9 @@ private[json] object FieldEncoder { encoder, { if (withExplicitNulls) { if (withExplicitEmptyCollections) 3 else 2 - } else if (withExplicitEmptyCollections) 1 - else 0 + } else { + if (withExplicitEmptyCollections) 1 else 0 + } } ) } diff --git a/zio-json/shared/src/main/scala/zio/json/package.scala b/zio-json/shared/src/main/scala/zio/json/package.scala index 7ec99fd4..deca8050 100644 --- a/zio-json/shared/src/main/scala/zio/json/package.scala +++ b/zio-json/shared/src/main/scala/zio/json/package.scala @@ -19,12 +19,12 @@ import zio.json.ast.Json package object json extends JsonPackagePlatformSpecific { implicit final class EncoderOps[A](private val a: A) extends AnyVal { - def toJson(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, None).toString + @inline def toJson(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, None).toString // Jon Pretty's better looking brother, but a bit slower - def toJsonPretty(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, Some(0)).toString + @inline def toJsonPretty(implicit encoder: JsonEncoder[A]): String = encoder.encodeJson(a, Some(0)).toString - def toJsonAST(implicit encoder: JsonEncoder[A]): Either[String, Json] = encoder.toJsonAST(a) + @inline def toJsonAST(implicit encoder: JsonEncoder[A]): Either[String, Json] = encoder.toJsonAST(a) } implicit final class DecoderOps(private val json: CharSequence) extends AnyVal { @@ -38,6 +38,6 @@ package object json extends JsonPackagePlatformSpecific { * * {{{jq 
'.rows[0].elements[0].distance' input.json}}} */ - def fromJson[A](implicit decoder: JsonDecoder[A]): Either[String, A] = decoder.decodeJson(json) + @inline def fromJson[A](implicit decoder: JsonDecoder[A]): Either[String, A] = decoder.decodeJson(json) } }