From 153acb078ca986922661dd534f6703e1515d6151 Mon Sep 17 00:00:00 2001
From: kciesielski
Date: Wed, 16 Aug 2023 09:12:08 +0200
Subject: [PATCH] Scaffolding for pickler derivation

---
 .scalafix.conf                                |   1 +
 build.sbt                                     |   2 +-
 .../scala-3/sttp/tapir/json/Pickler.scala     | 148 ++++++++++++++++++
 .../scala-3/sttp/tapir/json/PicklerTest.scala |  45 ++++++
 4 files changed, 195 insertions(+), 1 deletion(-)
 create mode 100644 json/upickle/src/main/scala-3/sttp/tapir/json/Pickler.scala
 create mode 100644 json/upickle/src/test/scala-3/sttp/tapir/json/PicklerTest.scala

diff --git a/.scalafix.conf b/.scalafix.conf
index ab83dd6e1c..ebe46a037e 100644
--- a/.scalafix.conf
+++ b/.scalafix.conf
@@ -1,3 +1,4 @@
 OrganizeImports {
   groupedImports = Merge
+  removeUnused = false
 }
diff --git a/build.sbt b/build.sbt
index 1a8c355e4f..78181a7666 100644
--- a/build.sbt
+++ b/build.sbt
@@ -67,7 +67,7 @@ val commonSettings = commonSmlBuildSettings ++ ossPublishSettings ++ Seq(
   }.value,
   mimaPreviousArtifacts := Set.empty, // we only use MiMa for `core` for now, using enableMimaSettings
   ideSkipProject := (scalaVersion.value == scala2_12) ||
-    (scalaVersion.value == scala3) ||
+    (scalaVersion.value == scala2_13) ||
     thisProjectRef.value.project.contains("Native") ||
     thisProjectRef.value.project.contains("JS"),
   bspEnabled := !ideSkipProject.value,
diff --git a/json/upickle/src/main/scala-3/sttp/tapir/json/Pickler.scala b/json/upickle/src/main/scala-3/sttp/tapir/json/Pickler.scala
new file mode 100644
index 0000000000..de9277031f
--- /dev/null
+++ b/json/upickle/src/main/scala-3/sttp/tapir/json/Pickler.scala
@@ -0,0 +1,148 @@
+package sttp.tapir.json
+
+import sttp.tapir.Codec.JsonCodec
+import _root_.upickle.AttributeTagged
+import sttp.tapir.Schema
+import sttp.tapir.Codec
+import scala.util.Try
+import scala.util.Success
+import sttp.tapir.DecodeResult.Error
+import sttp.tapir.DecodeResult.Value
+import scala.util.Failure
+import sttp.tapir.DecodeResult.Error.JsonDecodeException
+import _root_.upickle.core.Visitor
+import _root_.upickle.core.ObjVisitor
+import _root_.upickle.core.ArrVisitor
+import scala.compiletime.*
+import scala.deriving.Mirror
+import scala.util.NotGiven
+import scala.reflect.ClassTag
+import sttp.tapir.generic.Configuration
+
+trait TapirPickle[T] extends AttributeTagged:
+  def rw: this.ReadWriter[T]
+
+abstract class TapirPickleBase[T] extends TapirPickle[T]
+
+class DefaultReadWriterWrapper[T](delegateDefault: _root_.upickle.default.ReadWriter[T]) extends TapirPickleBase[T]:
+  def rw: this.ReadWriter[T] = new ReadWriter[T] {
+
+    override def visitArray(length: Int, index: Int): ArrVisitor[Any, T] = delegateDefault.visitArray(length, index)
+
+    override def visitFloat64String(s: String, index: Int): T = delegateDefault.visitFloat64String(s, index)
+
+    override def visitFloat32(d: Float, index: Int): T = delegateDefault.visitFloat32(d, index)
+
+    override def visitObject(length: Int, jsonableKeys: Boolean, index: Int): ObjVisitor[Any, T] =
+      delegateDefault.visitObject(length, jsonableKeys, index)
+
+    override def visitFloat64(d: Double, index: Int): T = delegateDefault.visitFloat64(d, index)
+
+    override def visitInt32(i: Int, index: Int): T = delegateDefault.visitInt32(i, index)
+
+    override def visitInt64(i: Long, index: Int): T = delegateDefault.visitInt64(i, index)
+
+    override def write0[V](out: Visitor[?, V], v: T): V = delegateDefault.write0(out, v)
+
+    override def visitBinary(bytes: Array[Byte], offset: Int, len: Int, index: Int): T =
+      delegateDefault.visitBinary(bytes, offset, len, index)
+
+    override def visitExt(tag: Byte, bytes: Array[Byte], offset: Int, len: Int, index: Int): T =
+      delegateDefault.visitExt(tag, bytes, offset, len, index)
+
+    override def visitNull(index: Int): T = delegateDefault.visitNull(index)
+
+    override def visitChar(s: Char, index: Int): T = delegateDefault.visitChar(s, index)
+
+    override def visitFalse(index: Int): T = delegateDefault.visitFalse(index)
+
+    override def visitString(s: CharSequence, index: Int): T = delegateDefault.visitString(s, index)
+
+    override def visitTrue(index: Int): T = delegateDefault.visitTrue(index)
+
+    override def visitFloat64StringParts(s: CharSequence, decIndex: Int, expIndex: Int, index: Int): T =
+      delegateDefault.visitFloat64StringParts(s, decIndex, expIndex, index)
+
+    override def visitUInt64(i: Long, index: Int): T = delegateDefault.visitUInt64(i, index)
+  }
+
+case class Pickler[T](innerUpickle: TapirPickle[T], schema: Schema[T]):
+  def toCodec: JsonCodec[T] = {
+    import innerUpickle._
+    given readWriter: innerUpickle.ReadWriter[T] = innerUpickle.rw
+    given schemaT: Schema[T] = schema
+    Codec.json[T] { s =>
+      Try(read[T](s)) match {
+        case Success(v) => Value(v)
+        case Failure(e) => Error(s, JsonDecodeException(errors = List.empty, e))
+      }
+    } { t => write(t) }
+  }
+
+object Pickler:
+  inline def derived[T: ClassTag](using Configuration, Mirror.Of[T]): Pickler[T] =
+    summonFrom {
+      case schema: Schema[T] => fromExistingSchema[T](schema)
+      case _ => fromMissingSchema[T]
+    }
+
+  private inline def fromMissingSchema[T: ClassTag](using Configuration, Mirror.Of[T]): Pickler[T] =
+    // This can badly affect performance: it repeats derivation excessively. Deriving writers for nested case class
+    // fields requires schemas for these field types, and deriving each such schema derives all of its child schemas.
+    // Another problem is deriving schemas for the same type many times.
+    given schema: Schema[T] = Schema.derived
+    fromExistingSchema(schema)
+
+  implicit inline def primitivePickler[T](using Configuration, NotGiven[Mirror.Of[T]]): Pickler[T] =
+    Pickler(new DefaultReadWriterWrapper(summonInline[_root_.upickle.default.ReadWriter[T]]), summonInline[Schema[T]])
+
+  private inline def fromExistingSchema[T: ClassTag](schema: Schema[T])(using Configuration, Mirror.Of[T]): Pickler[T] =
+    summonFrom {
+      case foundRW: _root_.upickle.default.ReadWriter[T] => // there are BOTH a Schema and a ReadWriter in scope
+        new Pickler[T](new DefaultReadWriterWrapper(foundRW), schema)
+      case _ =>
+        buildReadWritersFromSchema(schema)
+    }
+
+  private inline def buildReadWritersFromSchema[T: ClassTag](schema: Schema[T])(using m: Mirror.Of[T], c: Configuration): Pickler[T] =
+    // The lazy modifier is necessary to prevent infinite recursion in the derived instance for recursive types such as Lst
+    lazy val childPicklers = summonChildPicklerInstances[T, m.MirroredElemTypes]
+    inline m match {
+      case p: Mirror.ProductOf[T] => picklerProduct(p, schema, childPicklers)
+      case s: Mirror.SumOf[T] => picklerSum(s, schema, childPicklers)
+    }
+
+  private inline def summonChildPicklerInstances[T: ClassTag, Fields <: Tuple](using Configuration): List[Pickler[?]] =
+    inline erasedValue[Fields] match {
+      case _: (fieldType *: fieldTypesTail) => deriveOrSummon[T, fieldType] :: summonChildPicklerInstances[T, fieldTypesTail]
+      case _: EmptyTuple => Nil
+    }
+
+  private inline def deriveOrSummon[T, FieldType](using Configuration): Pickler[FieldType] =
+    inline erasedValue[FieldType] match
+      case _: T => deriveRec[T, FieldType]
+      case _ => summonInline[Pickler[FieldType]]
+
+  private inline def deriveRec[T, FieldType](using config: Configuration): Pickler[FieldType] =
+    inline erasedValue[T] match
+      case _: FieldType => error("Infinite recursive derivation")
+      case _ => Pickler.derived[FieldType](using summonInline[ClassTag[FieldType]], config, summonInline[Mirror.Of[FieldType]])
+
+  // Extract child RWs from the child picklers,
+  // create a new RW from scratch using the children's RWs and the fields of the product,
+  // use the provided existing schema,
+  // use data from the schema to customize the new schema
+  private inline def picklerProduct[T: ClassTag](product: Mirror.ProductOf[T], schema: Schema[T], childPicklers: => List[Pickler[?]])(using Configuration): Pickler[T] =
+    println(s">>>>>>> pickler product for ${schema.name}")
+    new Pickler[T](null, schema) // TODO
+
+  private inline def picklerSum[T: ClassTag](s: Mirror.SumOf[T], schema: Schema[T], childPicklers: => List[Pickler[?]]): Pickler[T] =
+    new Pickler[T](null, schema) // TODO
+
+  implicit def picklerToCodec[T](using p: Pickler[T]): JsonCodec[T] = p.toCodec
+
+object generic {
+  object auto { // TODO move to appropriate place
+    inline implicit def picklerForCaseClass[T: ClassTag](implicit m: Mirror.Of[T], cfg: Configuration): Pickler[T] = Pickler.derived[T]
+  }
+}
diff --git a/json/upickle/src/test/scala-3/sttp/tapir/json/PicklerTest.scala b/json/upickle/src/test/scala-3/sttp/tapir/json/PicklerTest.scala
new file mode 100644
index 0000000000..90ebe1d80c
--- /dev/null
+++ b/json/upickle/src/test/scala-3/sttp/tapir/json/PicklerTest.scala
@@ -0,0 +1,45 @@
+package sttp.tapir.json
+
+import _root_.upickle.default._
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+import sttp.tapir.DecodeResult.Value
+import sttp.tapir.Schema
+
+class PicklerTest extends AnyFlatSpec with Matchers {
+  behavior of "Pickler derivation"
+
+  case class FlatClass(fieldA: Int, fieldB: String)
+  case class Level1TopClass(fieldA: String, fieldB: Level1InnerClass)
+  case class Level1InnerClass(fieldA: Int)
+
+  it should "build from an existing Schema and ReadWriter" in {
+    // given schema and reader/writer in scope
+    given givenSchemaForCc: Schema[FlatClass] = Schema.derived[FlatClass]
+    given givenRwForCc: ReadWriter[FlatClass] = macroRW[FlatClass]
+
+    // when
+    val derived = Pickler.derived[FlatClass]
+    val obj = derived.toCodec.decode("""{"fieldA": 654, "fieldB": "field_b_value"}""")
+
+    // then
+    obj shouldBe Value(FlatClass(654, "field_b_value"))
+  }
+
+  it should "build an instance for a flat case class" in {
+    // when
+    val derived = Pickler.derived[FlatClass]
+    println(derived.innerUpickle)
+  }
+
+  it should "build an instance for a case class with a nested case class" in {
+    // when
+    import sttp.tapir.generic.auto._ // for Schema auto-derivation
+    import generic.auto._ // for Pickler auto-derivation
+
+    val derived = Pickler.derived[Level1TopClass]
+    println(derived.innerUpickle)
+  }
+}
+
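Notes for reviewers (not part of the patch):

The only path that is fully wired up so far is the one exercised by the first test: both a `Schema` and a uPickle `ReadWriter` are already in scope, so `fromExistingSchema` wraps the existing `ReadWriter` in a `DefaultReadWriterWrapper`. A minimal usage sketch of that path; `Book` is a hypothetical example type, not part of the patch:

```scala
import _root_.upickle.default.{ReadWriter, macroRW}
import sttp.tapir.Schema
import sttp.tapir.generic.Configuration
import sttp.tapir.json.Pickler

// hypothetical example type, not part of the patch
case class Book(title: String, year: Int)

given Configuration = Configuration.default
given Schema[Book] = Schema.derived[Book]
given ReadWriter[Book] = macroRW[Book]

// both a Schema and a ReadWriter are in scope, so derivation takes the
// fromExistingSchema + DefaultReadWriterWrapper path
val codec = Pickler.derived[Book].toCodec

codec.encode(Book("Dune", 1965))
// """{"title":"Dune","year":1965}"""
codec.decode("""{"title":"Dune","year":1965}""")
// Value(Book("Dune", 1965)); malformed JSON yields Error(..., JsonDecodeException(...))
```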
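Because `picklerToCodec` lifts any `Pickler` to a `JsonCodec`, a derived pickler can already be plugged into an endpoint body without going through the existing uPickle integration. A sketch reusing the hypothetical `Book` givens above; `jsonBodyOf` is an illustrative helper (not part of the patch) built on tapir's `stringBodyUtf8AnyFormat`:

```scala
import sttp.tapir.*
import sttp.tapir.json.Pickler

given Pickler[Book] = Pickler.derived

// hypothetical helper: a JSON string body backed by a Pickler-derived codec
def jsonBodyOf[T](using p: Pickler[T]): EndpointIO.Body[String, T] =
  stringBodyUtf8AnyFormat(p.toCodec)

val getBook: PublicEndpoint[Unit, Unit, Book, Any] =
  endpoint.get
    .in("books" / "latest")
    .out(jsonBodyOf[Book])
```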
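Finally, the `primitivePickler` fallback covers types with no `Mirror.Of` instance (primitives, `String`, etc.) by pairing uPickle's default `ReadWriter` with tapir's built-in `Schema`. A quick sanity check of that path, assuming the scaffolding compiles as written:

```scala
import sttp.tapir.generic.Configuration
import sttp.tapir.json.Pickler

given Configuration = Configuration.default

// Int has no Mirror.Of, so NotGiven[Mirror.Of[Int]] holds and primitivePickler applies
val intCodec = summon[Pickler[Int]].toCodec
intCodec.encode(42) // "42"
```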