Commit

Clean up of the benchmark code
plokhotnyuk committed Sep 3, 2019
1 parent 30c8a12 commit 75b755b
Showing 2 changed files with 2 additions and 59 deletions.
DslPlatformJson.scala
@@ -1,15 +1,10 @@
package com.github.plokhotnyuk.jsoniter_scala.benchmark

//import java.lang.reflect.{ParameterizedType, Type}
import java.time._
import java.util.UUID

import com.dslplatform.json._
//import com.dslplatform.json.runtime.ScalaMapEncoder
import com.dslplatform.json.runtime.Settings

import scala.collection.immutable.{BitSet, Seq}
//import scala.collection.immutable.IntMap
import scala.collection.mutable
import scala.reflect.runtime.universe.TypeTag

@@ -18,22 +13,6 @@ object DslPlatformJson {
.limitDigitsBuffer(Int.MaxValue /*WARNING: don't do this for open-systems*/)
.limitStringBuffer(Int.MaxValue /*WARNING: don't do this for open-systems*/)
.doublePrecision(JsonReader.DoublePrecision.EXACT))
/*
dslJson.registerWriterFactory(primitiveMapWriter)
private[this] def primitiveMapWriter(manifest: Type, dslJson: DslJson[_]): ScalaMapEncoder[Int, Any] = {
manifest match {
case pt: ParameterizedType if pt.getRawType == classOf[IntMap[_]] =>
val valueWriter = dslJson.tryFindWriter(pt.getActualTypeArguments.head)
val encoder = new ScalaMapEncoder[Int, Any](dslJson,true,
Some(NumberConverter.INT_WRITER.asInstanceOf[JsonWriter.WriteObject[Int]]),
Some(valueWriter.asInstanceOf[JsonWriter.WriteObject[Any]]))
dslJson.registerWriter(manifest, encoder)
encoder
case _ => null
}
}
*/
private[this] val threadLocalJsonWriter = new ThreadLocal[JsonWriter] {
override def initialValue(): JsonWriter = dslJson.newWriter
}
@@ -81,10 +60,6 @@ object DslPlatformJson {
implicit val (mutableSetOfIntsEncoder, mutableSetOfIntsDecoder) = codec[mutable.Set[Int]]
implicit val (missingReqFieldsEncoder, missingReqFieldsDecoder) = codec[MissingRequiredFields]
implicit val (nestedStructsEncoder, nestedStructsDecoder) = codec[NestedStructs]
/* FIXME: DSL-JSON throws NPE at com.dslplatform.json.runtime.Generics.getTypeNameCompat(Generics.java:200)
implicit val (openHashMapOfIntsToBooleansEncoder, openHashMapOfIntsToBooleansDecoder) =
codec[mutable.OpenHashMap[Int, Boolean]]
*/
/* FIXME: DSL-JSON throws java.lang.IllegalArgumentException: requirement failed: Unable to create decoder for com.github.plokhotnyuk.jsoniter_scala.benchmark.Primitives
implicit val (primitivesEncoder, primitivesDecoder) = setupCodecs[Primitives]
*/
@@ -107,13 +82,4 @@ object DslPlatformJson {

private[this] def codec[T](implicit tag: TypeTag[T]): (JsonWriter.WriteObject[T], JsonReader.ReadObject[T]) =
dslJson.encoder[T] -> dslJson.decoder[T]

/*
private[this] def stringCodec[T](f: String => T): (JsonWriter.WriteObject[T], JsonReader.ReadObject[T]) =
new JsonWriter.WriteObject[T] {
override def write(writer: JsonWriter, value: T): Unit = writer.writeString(value.toString)
} -> new JsonReader.ReadObject[T] {
override def read(reader: JsonReader[_]): T = f(reader.readString())
}
*/
}
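
For context, the encoder/decoder pairs produced by the codec helper above are typically driven through the thread-local JsonWriter and a JsonReader bound to the input bytes. A minimal sketch, assuming hypothetical helper names writeToArray and readFromArray (not part of this commit):

def writeToArray[T](obj: T)(implicit encoder: JsonWriter.WriteObject[T]): Array[Byte] = {
  val writer = threadLocalJsonWriter.get()
  writer.reset()              // reuse the thread-local writer's internal buffer
  encoder.write(writer, obj)  // stream the value as UTF-8 JSON
  writer.toByteArray          // copy out the written bytes
}

def readFromArray[T](json: Array[Byte])(implicit decoder: JsonReader.ReadObject[T]): T = {
  val reader = dslJson.newReader(json) // bind a reader to the input bytes
  reader.getNextToken()                // position on the first JSON token
  decoder.read(reader)
}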
JacksonSerDesers.scala
@@ -7,15 +7,13 @@ import com.fasterxml.jackson.core.{JsonFactory, JsonFactoryBuilder, JsonGenerato
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.ser.std.StdSerializer
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.afterburner.AfterburnerModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.github.plokhotnyuk.jsoniter_scala.benchmark.SuitEnum.SuitEnum

import scala.collection.immutable.BitSet
import scala.collection.mutable

object JacksonSerDesers {
def createJacksonMapper: ObjectMapper with ScalaObjectMapper = {
val jsonFactory = new JsonFactoryBuilder()
@@ -25,14 +23,13 @@ object JacksonSerDesers {
new ObjectMapper(jsonFactory) with ScalaObjectMapper {
registerModule(DefaultScalaModule)
registerModule(new SimpleModule()
.addSerializer(classOf[BitSet], new BitSetSerializer)
.addSerializer(classOf[mutable.BitSet], new MutableBitSetSerializer)
.addSerializer(classOf[Array[Byte]], new ByteArraySerializer)
.addSerializer(classOf[SuitADT], new SuitADTSerializer)
.addSerializer(classOf[SuitEnum], new SuitEnumSerializer)
.addDeserializer(classOf[SuitADT], new SuitADTDeserializer)
.addDeserializer(classOf[SuitEnum], new SuitEnumDeserializer))
registerModule(new JavaTimeModule)
registerModule(new Jdk8Module)
registerModule(new AfterburnerModule)
configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, false)
configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
@@ -55,26 +52,6 @@ object JacksonSerDesers {
jacksonPrettyMapper.configure(SerializationFeature.INDENT_OUTPUT, true)
}

class BitSetSerializer extends StdSerializer[BitSet](classOf[BitSet]) {
override def serialize(value: BitSet, gen: JsonGenerator, provider: SerializerProvider): Unit = {
gen.writeStartArray()
if (!isEmpty(provider, value)) value.foreach(gen.writeNumber)
gen.writeEndArray()
}

override def isEmpty(provider: SerializerProvider, value: BitSet): Boolean = value.isEmpty
}

class MutableBitSetSerializer extends StdSerializer[mutable.BitSet](classOf[mutable.BitSet]) {
override def serialize(value: mutable.BitSet, gen: JsonGenerator, provider: SerializerProvider): Unit = {
gen.writeStartArray()
if (!isEmpty(provider, value)) value.foreach(gen.writeNumber)
gen.writeEndArray()
}

override def isEmpty(provider: SerializerProvider, value: mutable.BitSet): Boolean = value.isEmpty
}

class ByteArraySerializer extends StdSerializer[Array[Byte]](classOf[Array[Byte]]) {
override def serialize(value: Array[Byte], gen: JsonGenerator, provider: SerializerProvider): Unit = {
gen.writeStartArray()
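
For context, the mapper returned by createJacksonMapper is used like any Jackson ObjectMapper, and the ScalaObjectMapper mixin adds Manifest-based readValue overloads so generic Scala types round-trip without explicit TypeReferences. A minimal usage sketch (the round-trip below is illustrative only, not part of this commit):

val mapper = JacksonSerDesers.createJacksonMapper
val bytes = mapper.writeValueAsBytes(List(1, 2, 3)) // standard ObjectMapper API
val parsed = mapper.readValue[List[Int]](bytes)     // Manifest-based overload from ScalaObjectMapper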
