Skip to content

Commit

Permalink
Scaffolding for pickler derivation
Browse files Browse the repository at this point in the history
  • Loading branch information
kciesielski committed Aug 24, 2023
1 parent 693087f commit 153acb0
Show file tree
Hide file tree
Showing 4 changed files with 195 additions and 1 deletion.
1 change: 1 addition & 0 deletions .scalafix.conf
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
OrganizeImports {
groupedImports = Merge
removeUnused = false
}
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ val commonSettings = commonSmlBuildSettings ++ ossPublishSettings ++ Seq(
}.value,
mimaPreviousArtifacts := Set.empty, // we only use MiMa for `core` for now, using enableMimaSettings
ideSkipProject := (scalaVersion.value == scala2_12) ||
(scalaVersion.value == scala3) ||
(scalaVersion.value == scala2_13) ||
thisProjectRef.value.project.contains("Native") ||
thisProjectRef.value.project.contains("JS"),
bspEnabled := !ideSkipProject.value,
Expand Down
148 changes: 148 additions & 0 deletions json/upickle/src/main/scala-3/sttp/tapir/json/Pickler.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,148 @@
package sttp.tapir.json

import sttp.tapir.Codec.JsonCodec
import _root_.upickle.AttributeTagged
import sttp.tapir.Schema
import sttp.tapir.Codec
import scala.util.Try
import scala.util.Success
import sttp.tapir.DecodeResult.Error
import sttp.tapir.DecodeResult.Value
import scala.util.Failure
import sttp.tapir.DecodeResult.Error.JsonDecodeException
import _root_.upickle.core.Visitor
import _root_.upickle.core.ObjVisitor
import _root_.upickle.core.ArrVisitor
import scala.compiletime.*
import scala.deriving.Mirror
import scala.util.NotGiven
import scala.reflect.ClassTag
import sttp.tapir.generic.Configuration

/** A pickling configuration for type `T`: a upickle `AttributeTagged` instance that additionally
  * exposes the `ReadWriter` for `T`, so that a codec can be built from it (see `Pickler.toCodec`).
  */
trait TapirPickle[T] extends AttributeTagged:
  /** The reader/writer pair for `T`, path-dependent on this pickle instance. */
  def rw: this.ReadWriter[T]

abstract class TapirPickleBase[T] extends TapirPickle[T]

/** Adapts an already-available `upickle.default.ReadWriter[T]` to this pickle's own
  * path-dependent `ReadWriter[T]` by forwarding every visitor callback and the write entry point
  * to the wrapped instance.
  */
class DefaultReadWriterWrapper[T](delegateDefault: _root_.upickle.default.ReadWriter[T]) extends TapirPickleBase[T]:
  def rw: this.ReadWriter[T] = new ReadWriter[T] {

    // Scalar visitors
    override def visitNull(index: Int): T = delegateDefault.visitNull(index)
    override def visitTrue(index: Int): T = delegateDefault.visitTrue(index)
    override def visitFalse(index: Int): T = delegateDefault.visitFalse(index)
    override def visitChar(s: Char, index: Int): T = delegateDefault.visitChar(s, index)
    override def visitString(s: CharSequence, index: Int): T = delegateDefault.visitString(s, index)

    // Numeric visitors
    override def visitInt32(i: Int, index: Int): T = delegateDefault.visitInt32(i, index)
    override def visitInt64(i: Long, index: Int): T = delegateDefault.visitInt64(i, index)
    override def visitUInt64(i: Long, index: Int): T = delegateDefault.visitUInt64(i, index)
    override def visitFloat32(d: Float, index: Int): T = delegateDefault.visitFloat32(d, index)
    override def visitFloat64(d: Double, index: Int): T = delegateDefault.visitFloat64(d, index)
    override def visitFloat64String(s: String, index: Int): T = delegateDefault.visitFloat64String(s, index)
    override def visitFloat64StringParts(s: CharSequence, decIndex: Int, expIndex: Int, index: Int): T =
      delegateDefault.visitFloat64StringParts(s, decIndex, expIndex, index)

    // Binary / extension visitors
    override def visitBinary(bytes: Array[Byte], offset: Int, len: Int, index: Int): T =
      delegateDefault.visitBinary(bytes, offset, len, index)
    override def visitExt(tag: Byte, bytes: Array[Byte], offset: Int, len: Int, index: Int): T =
      delegateDefault.visitExt(tag, bytes, offset, len, index)

    // Structured visitors
    override def visitArray(length: Int, index: Int): ArrVisitor[Any, T] = delegateDefault.visitArray(length, index)
    override def visitObject(length: Int, jsonableKeys: Boolean, index: Int): ObjVisitor[Any, T] =
      delegateDefault.visitObject(length, jsonableKeys, index)

    // Serialization entry point
    override def write0[V](out: Visitor[?, V], v: T): V = delegateDefault.write0(out, v)
  }

/** Couples a upickle-based pickle (providing a `ReadWriter`) with a tapir `Schema` for the same type `T`. */
case class Pickler[T](innerUpickle: TapirPickle[T], schema: Schema[T]):
  /** Builds a tapir JSON codec backed by the wrapped `ReadWriter`; read failures are surfaced as
    * a `DecodeResult.Error` carrying a `JsonDecodeException`.
    */
  def toCodec: JsonCodec[T] = {
    import innerUpickle._
    given readWriter: innerUpickle.ReadWriter[T] = innerUpickle.rw
    given schemaT: Schema[T] = schema
    Codec.json[T] { raw =>
      Try(read[T](raw)).fold(
        cause => Error(raw, JsonDecodeException(errors = List.empty, cause)),
        Value(_)
      )
    } { t => write(t) }
  }

object Pickler:

  /** Derives a [[Pickler]] for `T`: reuses a `Schema[T]` found in implicit scope when available,
    * otherwise derives one first.
    */
  inline def derived[T: ClassTag](using Configuration, Mirror.Of[T]): Pickler[T] =
    summonFrom {
      case schema: Schema[T] => fromExistingSchema[T](schema)
      case _ => fromMissingSchema[T]
    }

  private inline def fromMissingSchema[T: ClassTag](using Configuration, Mirror.Of[T]): Pickler[T] =
    // Can badly affect perf: derivation is going to be repeated excessively.
    // The issue here is that deriving writers for nested case-class fields requires schemas for these field
    // types, and deriving each such schema derives all of its child schemas. Another problem is deriving
    // schemas for the same type many times.
    given schema: Schema[T] = Schema.derived
    fromExistingSchema(schema)

  /** Fallback for non-ADT types: wraps the `ReadWriter` and `Schema` already available in implicit scope. */
  implicit inline def primitivePickler[T](using Configuration, NotGiven[Mirror.Of[T]]): Pickler[T] =
    Pickler(new DefaultReadWriterWrapper(summonInline[_root_.upickle.default.ReadWriter[T]]), summonInline[Schema[T]])

  private inline def fromExistingSchema[T: ClassTag](schema: Schema[T])(using Configuration, Mirror.Of[T]): Pickler[T] =
    summonFrom {
      case foundRW: _root_.upickle.default.ReadWriter[T] => // there is BOTH a schema and a ReadWriter in scope
        new Pickler[T](new DefaultReadWriterWrapper(foundRW), schema)
      case _ =>
        buildReadWritersFromSchema(schema)
    }

  private inline def buildReadWritersFromSchema[T: ClassTag](schema: Schema[T])(using m: Mirror.Of[T], c: Configuration): Pickler[T] =
    // The lazy modifier is necessary for preventing infinite recursion in the derived instance for recursive types such as Lst
    lazy val childPicklers = summonChildPicklerInstances[T, m.MirroredElemTypes]
    inline m match {
      case p: Mirror.ProductOf[T] => picklerProduct(p, schema, childPicklers)
      case s: Mirror.SumOf[T] => picklerSum(s, schema, childPicklers)
    }

  /** Summons (or derives) a [[Pickler]] for each element type of the mirrored product/sum. */
  private inline def summonChildPicklerInstances[T: ClassTag, Fields <: Tuple](using Configuration): List[Pickler[?]] =
    inline erasedValue[Fields] match {
      case _: (fieldType *: fieldTypesTail) => deriveOrSummon[T, fieldType] :: summonChildPicklerInstances[T, fieldTypesTail]
      case _: EmptyTuple => Nil
    }

  private inline def deriveOrSummon[T, FieldType](using Configuration): Pickler[FieldType] =
    inline erasedValue[FieldType] match
      // a field whose type is a subtype of T (recursive ADT case) must be derived, not summoned
      case _: T => deriveRec[T, FieldType]
      case _ => summonInline[Pickler[FieldType]]

  private inline def deriveRec[T, FieldType](using config: Configuration): Pickler[FieldType] =
    inline erasedValue[T] match
      case _: FieldType => error("Infinite recursive derivation")
      case _ => Pickler.derived[FieldType](using summonInline[ClassTag[FieldType]], config, summonInline[Mirror.Of[FieldType]])

  // Extract child RWs from child picklers,
  // create a new RW from scratch using children RWs and fields of the product,
  // use the provided existing schema,
  // use data from the schema to customize the new schema.
  private inline def picklerProduct[T: ClassTag](product: Mirror.ProductOf[T], schema: Schema[T], childPicklers: => List[Pickler[?]])(using Configuration): Pickler[T] =
    new Pickler[T](null, schema) // TODO build the product ReadWriter from childPicklers

  private inline def picklerSum[T: ClassTag](s: Mirror.SumOf[T], schema: Schema[T], childPicklers: => List[Pickler[?]]): Pickler[T] =
    new Pickler[T](null, schema) // TODO build the sum ReadWriter from childPicklers

  /** Lifts any in-scope [[Pickler]] to a tapir JSON codec. */
  implicit def picklerToCodec[T](using p: Pickler[T]): JsonCodec[T] = p.toCodec

/** Importing `generic.auto.*` enables fully automatic [[Pickler]] derivation for case classes. */
object generic:
  object auto: // TODO move to an appropriate place
    inline implicit def picklerForCaseClass[T: ClassTag](implicit m: Mirror.Of[T], cfg: Configuration): Pickler[T] =
      Pickler.derived[T]
45 changes: 45 additions & 0 deletions json/upickle/src/test/scala-3/sttp/tapir/json/PicklerTest.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
package sttp.tapir.json

import _root_.upickle.default._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import sttp.tapir.DecodeResult.Value
import sttp.tapir.Schema

class PicklerTest extends AnyFlatSpec with Matchers {
  behavior of "Pickler derivation"

  // Fixtures: a flat case class and a two-level nested structure.
  case class FlatClass(fieldA: Int, fieldB: String)
  case class Level1TopClass(fieldA: String, fieldB: Level1InnerClass)
  case class Level1InnerClass(fieldA: Int)

  it should "build from an existing Schema and ReadWriter" in {
    // given: both a schema and a reader/writer are already in implicit scope
    given givenSchemaForCc: Schema[FlatClass] = Schema.derived[FlatClass]
    given givenRwForCc: ReadWriter[FlatClass] = macroRW[FlatClass]

    // when
    val pickler = Pickler.derived[FlatClass]
    val decoded = pickler.toCodec.decode("""{"fieldA": 654, "fieldB": "field_b_value"}""")

    // then
    decoded shouldBe Value(FlatClass(654, "field_b_value"))
  }

  it should "build an instance for a flat case class" in {
    // when: no schema or reader/writer given — everything must be derived
    val pickler = Pickler.derived[FlatClass]
    // NOTE(review): no assertion yet — scaffolding test; the derivation is currently a stub
    println(pickler.innerUpickle)
  }

  it should "build an instance for a case class with a nested case class" in {
    // when
    import sttp.tapir.generic.auto._ // for Schema auto-derivation
    import generic.auto._ // for Pickler auto-derivation

    val pickler = Pickler.derived[Level1TopClass]
    // NOTE(review): no assertion yet — scaffolding test; the derivation is currently a stub
    println(pickler.innerUpickle)
  }
}


0 comments on commit 153acb0

Please sign in to comment.