diff --git a/.scalafix.conf b/.scalafix.conf index ab83dd6e1c..ebe46a037e 100644 --- a/.scalafix.conf +++ b/.scalafix.conf @@ -1,3 +1,4 @@ OrganizeImports { groupedImports = Merge + removeUnused = false } diff --git a/build.sbt b/build.sbt index 1a8c355e4f..97d1d94a71 100644 --- a/build.sbt +++ b/build.sbt @@ -67,7 +67,7 @@ val commonSettings = commonSmlBuildSettings ++ ossPublishSettings ++ Seq( }.value, mimaPreviousArtifacts := Set.empty, // we only use MiMa for `core` for now, using enableMimaSettings ideSkipProject := (scalaVersion.value == scala2_12) || - (scalaVersion.value == scala3) || + (scalaVersion.value == scala2_13) || thisProjectRef.value.project.contains("Native") || thisProjectRef.value.project.contains("JS"), bspEnabled := !ideSkipProject.value, @@ -179,6 +179,7 @@ lazy val rawAllAggregates = core.projectRefs ++ zioMetrics.projectRefs ++ json4s.projectRefs ++ playJson.projectRefs ++ + picklerJson.projectRefs ++ sprayJson.projectRefs ++ uPickleJson.projectRefs ++ tethysJson.projectRefs ++ @@ -861,6 +862,19 @@ lazy val uPickleJson: ProjectMatrix = (projectMatrix in file("json/upickle")) ) .dependsOn(core) +lazy val picklerJson: ProjectMatrix = (projectMatrix in file("json/pickler")) + .settings(commonSettings) + .settings( + name := "tapir-json-pickler", + libraryDependencies ++= Seq( + "com.lihaoyi" %%% "upickle" % Versions.upickle, + scalaTest.value % Test + ) + ) + .jvmPlatform(scalaVersions = List(scala3)) + .jsPlatform(scalaVersions = List(scala3)) + .dependsOn(core % "compile->compile;test->test") + lazy val tethysJson: ProjectMatrix = (projectMatrix in file("json/tethys")) .settings(commonSettings) .settings( @@ -2043,9 +2057,12 @@ lazy val examples3: ProjectMatrix = (projectMatrix in file("examples3")) ) .jvmPlatform(scalaVersions = List(scala3)) .dependsOn( + circeJson, http4sServer, + nettyServer, + picklerJson, + sttpClient, swaggerUiBundle, - circeJson ) //TODO this should be invoked by compilation process, see #https://github.com/scalameta/mdoc/issues/355 diff --git a/core/src/main/scala-3/sttp/tapir/macros/SchemaMacros.scala b/core/src/main/scala-3/sttp/tapir/macros/SchemaMacros.scala index 1eda2d5d6d..83312ee972 100644 --- a/core/src/main/scala-3/sttp/tapir/macros/SchemaMacros.scala +++ b/core/src/main/scala-3/sttp/tapir/macros/SchemaMacros.scala @@ -199,6 +199,7 @@ private[tapir] object SchemaCompanionMacros { case Block(List(defdef), _) => resolveFunctionName(defdef) case DefDef(_, _, _, Some(body)) => resolveFunctionName(body) case Apply(fun, _) => resolveFunctionName(fun) + case Ident(str) => str case Select(_, kind) => kind } diff --git a/doc/endpoint/json.md b/doc/endpoint/json.md index 0bc1dac7ea..9a02d9f1da 100644 --- a/doc/endpoint/json.md +++ b/doc/endpoint/json.md @@ -15,10 +15,17 @@ better error reporting, in case one of the components required to create the jso .. note:: Note that the process of deriving schemas, and deriving library-specific json encoders and decoders is entirely - separate. The first is controlled by tapir, the second - by the json library. Any customisation, e.g. for field - naming or inheritance strategies, must be done separately for both derivations. + separate (unless you're using the pickler module - see below). The first is controlled by tapir, the second - by the + json library. Any customisation, e.g. for field naming or inheritance strategies, must be done separately for both + derivations. 
 ```
 
+## Pickler
+
+Alternatively, instead of deriving schemas and library-specific json encoders and decoders separately, you can use
+the experimental [pickler](pickler.md) module, which takes care of both derivations in a consistent way and allows
+customization through a single, common configuration API.
+
 ## Implicit json codecs
 
 If you have a custom, implicit `Codec[String, T, Json]` instance, you should use the `customCodecJsonBody[T]` method instead.
diff --git a/doc/endpoint/pickler.md b/doc/endpoint/pickler.md
new file mode 100644
index 0000000000..63367f1477
--- /dev/null
+++ b/doc/endpoint/pickler.md
@@ -0,0 +1,192 @@
+# JSON Pickler
+
+Pickler is an experimental module that simplifies working with JSON, using a single, consistent configuration API to provide both accurate endpoint documentation and server- or client-side encoding/decoding.
+
+In [other](json.md) tapir-JSON integrations, you have to keep the `Schema` (which is used for documentation) in sync with the library-specific configuration of the JSON encoders/decoders. The more customization you need, such as special field name encoding or a preferred representation of sealed hierarchies, the more configuration you have to repeat, specific to the chosen library (µPickle, Circe, etc.).
+
+`Pickler[T]` takes care of this, generating a consistent pair of `Schema[T]` and `JsonCodec[T]`, with a single point of customization. Under the hood it uses [µPickle](http://www.lihaoyi.com/upickle/) to handle JSON, but it aims to keep this an implementation detail.
+
+To use pickler, add the following dependency to your project:
+
+```scala
+"com.softwaremill.sttp.tapir" %% "tapir-json-pickler" % "@VERSION@"
+```
+
+Please note that it is available only for Scala 3 (on the JVM and Scala.js).
+
+## Semi-automatic derivation
+
+A pickler can be derived directly using `Pickler.derived[T]`. This will derive both the `Schema[T]` and the `JsonCodec[T]`:
+
+```scala
+import sttp.tapir.Codec.JsonCodec
+import sttp.tapir.json.pickler.*
+
+case class Book(author: String, title: String, year: Int)
+
+val pickler: Pickler[Book] = Pickler.derived
+val codec: JsonCodec[Book] = pickler.toCodec
+val bookJsonStr = // { "author": "Herman Melville", "title": "Moby Dick", "year": 1851 }
+  codec.encode(Book("Herman Melville", "Moby Dick", 1851))
+```
+
+A `given` pickler in scope allows using `jsonQuery`, `jsonBody` and `jsonBodyWithRaw`, which need to be imported from the `sttp.tapir.json.pickler` package. For example:
+
+```scala
+import sttp.tapir.*
+import sttp.tapir.json.pickler.*
+
+case class Book(author: String, title: String, year: Int)
+
+given Pickler[Book] = Pickler.derived
+
+val addBook: PublicEndpoint[Book, Unit, Unit, Any] =
+  endpoint
+    .in("books")
+    .in("add")
+    .in(jsonBody[Book].description("The book to add"))
+```
+
+A pickler can also be derived using the `derives` keyword directly on a class:
+
+```scala
+import sttp.tapir.json.pickler.*
+
+case class Book(author: String, title: String, year: Int) derives Pickler
+val pickler: Pickler[Book] = summon[Pickler[Book]]
+```
+
+Picklers for primitive types are available out-of-the-box. For more complex hierarchies, like nested `case class` structures or `enum`s, you'll need to provide picklers for all children (fields, enum cases, etc.), as in the sketch below. Alternatively, you can use the automatic derivation described further down.
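+
+For example, here's a minimal sketch of semi-automatic derivation for a nested structure, where the child picklers are
+provided explicitly (the `Address`/`Person` types are illustrative):
+
+```scala
+import sttp.tapir.json.pickler.*
+
+case class Address(street: String, zipCode: String)
+case class Person(name: String, address: Address)
+
+// the pickler for the nested Address type has to be provided first
+given Pickler[Address] = Pickler.derived
+given Pickler[Person] = Pickler.derived
+```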
+
+## Automatic derivation
+
+Picklers can be derived at the usage site, when required, by adding the auto-derivation import:
+
+```scala
+import sttp.tapir.json.pickler.*
+import sttp.tapir.json.pickler.generic.auto.*
+
+enum Country:
+  case India
+  case Bhutan
+
+case class Address(street: String, zipCode: String, country: Country)
+case class Person(name: String, address: Address)
+
+val pickler: Pickler[Person] = summon[Pickler[Person]]
+```
+
+However, this can negatively impact compilation performance, as the same pickler might be derived multiple times, once for each usage of a type. This can be improved by explicitly providing picklers (as described in the semi-automatic section above), either for all types, or only for selected ones. It's important then to make sure that the manually-provided picklers are in the implicit scope at the usage sites.
+
+## Configuring pickler derivation
+
+It is possible to configure schema and codec derivation by providing an implicit `sttp.tapir.generic.Configuration`, just as for standalone [schema derivation](schemas.md). This configuration allows switching the field naming policy to `snake_case`, `kebab_case`, or an arbitrary transformation function, as well as setting the field name of the coproduct (sealed hierarchy) type discriminator, which is discussed in detail in the following sections.
+
+```scala
+import sttp.tapir.generic.Configuration
+
+given customConfiguration: Configuration = Configuration.default.withSnakeCaseMemberNames
+```
+
+## Enums / sealed traits / coproducts
+
+Pickler derivation for coproduct types (enums / sealed hierarchies) works automatically, by adding a `$type` discriminator field holding the full class name. This is the default behavior of µPickle, but it can be overridden either by changing the discriminator field name, or by using custom logic to read the discriminator value from a field of the base trait.
+
+A discriminator field can be specified for coproducts by providing it in the configuration; it will only be used during automatic and semi-automatic derivation:
+
+```scala
+import sttp.tapir.generic.Configuration
+
+given customConfiguration: Configuration =
+  Configuration.default.withDiscriminator("who_am_i")
+```
+
+The discriminator will be added as a field to all coproduct child codecs and schemas, if it's not yet present. The schema of the added field will always be a `Schema.string`. Finally, the mapping between the discriminator field values and the child schemas will be generated using `Configuration.toDiscriminatorValue(childSchemaName)`.
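+
+For instance, here's a minimal sketch of the discriminator configuration in action (the `Vehicle` hierarchy is
+illustrative, and the exact discriminator values depend on `Configuration.toDiscriminatorValue`):
+
+```scala
+import sttp.tapir.generic.Configuration
+import sttp.tapir.json.pickler.*
+
+given Configuration = Configuration.default.withDiscriminator("who_am_i")
+
+sealed trait Vehicle
+case class Car(seats: Int) extends Vehicle
+case class Bike(electric: Boolean) extends Vehicle
+
+given Pickler[Car] = Pickler.derived
+given Pickler[Bike] = Pickler.derived
+given Pickler[Vehicle] = Pickler.derived
+
+// e.g. { "who_am_i": "...Car", "seats": 5 }
+summon[Pickler[Vehicle]].toCodec.encode(Car(5))
+```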
+
+Finally, if the discriminator is a field that's defined on the base trait (and hence in each implementation), the schemas can be specified as a custom implicit value using the `Pickler.oneOfUsingField` macro, for example (this will also generate the appropriate mappings):
+
+```scala
+sealed trait Entity {
+  def kind: String
+}
+case class Person(firstName: String, lastName: String) extends Entity {
+  def kind: String = "person"
+}
+case class Organization(name: String) extends Entity {
+  def kind: String = "org"
+}
+
+import sttp.tapir.json.pickler.*
+
+val pPerson = Pickler.derived[Person]
+val pOrganization = Pickler.derived[Organization]
+given pEntity: Pickler[Entity] =
+  Pickler.oneOfUsingField[Entity, String](_.kind, _.toString)(
+    "person" -> pPerson,
+    "org" -> pOrganization
+  )
+
+// { "$type": "person", "firstName": "Jessica", "lastName": "West" }
+pEntity.toCodec.encode(Person("Jessica", "West"))
+```
+
+## Customising derived schemas
+
+Schemas generated by picklers can be customized using annotations, just like with traditional schema derivation (see [here](schemas.html#using-annotations)). Some annotations automatically affect the JSON codecs:
+
+* `@encodedName` determines the JSON field name
+* `@default` sets the default value used when the field is missing in JSON
+
+## Enumerations
+
+Scala 3 `enums`, where all cases are parameterless, are treated as enumerations (not as coproducts / sealed hierarchies). They are also automatically handled by `Pickler.derived[T]`: enum values are encoded as simple strings representing the type name. For example:
+
+```scala
+import sttp.tapir.json.pickler.*
+
+enum ColorEnum:
+  case Green, Pink
+
+case class ColorResponse(color: ColorEnum, description: String)
+
+given Pickler[ColorEnum] = Pickler.derived
+val pResponse = Pickler.derived[ColorResponse]
+
+// { "color": "Pink", "description": "Pink desc" }
+pResponse.toCodec.encode(
+  ColorResponse(ColorEnum.Pink, "Pink desc")
+)
+// Enumeration schema with a proper validator
+pResponse.schema
+```
+
+If you need to customize the enum value encoding, use `Pickler.derivedEnumeration[T]`:
+
+```scala
+import sttp.tapir.json.pickler.*
+
+enum ColorEnum:
+  case Green, Pink
+
+case class ColorResponse(color: ColorEnum, description: String)
+
+given Pickler[ColorEnum] = Pickler
+  .derivedEnumeration[ColorEnum]
+  .customStringBased(_.ordinal.toString)
+
+val pResponse = Pickler.derived[ColorResponse]
+
+// { "color": "1", "description": "Pink desc" }
+pResponse.toCodec.encode(
+  ColorResponse(ColorEnum.Pink, "Pink desc")
+)
+// Enumeration schema with a proper validator
+pResponse.schema
+```
+
+## Using existing µPickle Readers and Writers
+
+If you would like to use an already defined `upickle.default.ReadWriter[T]`, you can still derive a `Pickler[T]`, but you have to provide both your `ReadWriter[T]` and a `Schema[T]` in the given (implicit) scope. With such a setup, you can proceed with `Pickler.derived[T]`, as in the sketch below.
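+
+For example, here's a minimal sketch, assuming a `Color` wrapper type with a hand-written `ReadWriter` (the type and
+its encoding are illustrative):
+
+```scala
+import sttp.tapir.Schema
+import sttp.tapir.json.pickler.*
+import upickle.default.ReadWriter
+
+case class Color(rgb: Int)
+
+// an existing uPickle ReadWriter, encoding Color as a plain number
+given ReadWriter[Color] = upickle.default.readwriter[Int].bimap(_.rgb, Color.apply)
+// a matching tapir Schema, so that the documentation agrees with the encoding
+given Schema[Color] = Schema.schemaForInt.as[Color]
+
+// uses the ReadWriter and Schema from scope, instead of deriving new ones
+val pickler: Pickler[Color] = Pickler.derived
+```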
+
+## Divergences from default µPickle behavior
+
+* The tapir pickler serialises `None` values as `null`, instead of wrapping the value in an array
+* Value classes (case classes extending `AnyVal`) will be serialised as simple, unwrapped values
+
diff --git a/doc/endpoint/schemas.md b/doc/endpoint/schemas.md
index d785499829..c74ec3402f 100644
--- a/doc/endpoint/schemas.md
+++ b/doc/endpoint/schemas.md
@@ -126,7 +126,11 @@ will be represented as a coproduct which contains a list of child schemas, without any discriminator field.
 
     Note that whichever approach you choose to define the coproduct schema, it has to match the way the value is
     encoded and decoded by the codec. E.g. when the schema is for a json body, the discriminator must be separately
-    configured in the json library, matching the configuration of the schema.
+    configured in the json library, matching the configuration of the schema.
+
+    Alternatively, instead of deriving schemas and json codecs separately, you can use the experimental
+    `pickler <pickler.html>`_
+    module, which provides a higher-level ``Pickler`` concept that takes care of consistent derivation.
 ```
 
 ### Field discriminators
diff --git a/doc/stability.md b/doc/stability.md
index 96599e822f..f2faef89ff 100644
--- a/doc/stability.md
+++ b/doc/stability.md
@@ -81,6 +81,7 @@ The modules are categorised using the following levels:
 | spray-json | stabilising |
 | tethys | stabilising |
 | upickle | stabilising |
+| pickler | experimental |
 | zio-json | experimental |
 | zio1-json | experimental |
 
diff --git a/examples3/src/main/scala/sttp/tapir/examples3/BooksPicklerExample.scala b/examples3/src/main/scala/sttp/tapir/examples3/BooksPicklerExample.scala
new file mode 100644
index 0000000000..2d31dfa8d1
--- /dev/null
+++ b/examples3/src/main/scala/sttp/tapir/examples3/BooksPicklerExample.scala
@@ -0,0 +1,174 @@
+package sttp.tapir.examples3
+
+import com.typesafe.scalalogging.StrictLogging
+import sttp.tapir.server.netty.{NettyFutureServer, NettyFutureServerBinding}
+
+import scala.concurrent.Await
+import scala.concurrent.duration.Duration
+
+object BooksPicklerExample extends App with StrictLogging {
+  type Limit = Option[Int]
+  type AuthToken = String
+
+  case class Country(name: String)
+  case class Author(name: String, country: Country)
+  case class Genre(name: String, description: String)
+  case class Book(title: String, genre: Genre, year: Int, author: Author)
+  case class BooksQuery(genre: Option[String], limit: Limit)
+
+  val declaredPort = 9090
+  val declaredHost = "localhost"
+
+  /** Descriptions of the endpoints used in the example.
+ */ + object Endpoints { + import sttp.tapir.* + import sttp.tapir.json.pickler.* + import sttp.tapir.json.pickler.generic.auto.* + + // All endpoints report errors as strings, and have the common path prefix '/books' + private val baseEndpoint = endpoint.errorOut(stringBody).in("books") + + // The path for this endpoint will be '/books/add', as we are using the base endpoint + val addBook: PublicEndpoint[(Book, AuthToken), String, Unit, Any] = baseEndpoint.post + .in("add") + .in( + jsonBody[Book] + .description("The book to add") + .example(Book("Pride and Prejudice", Genre("Novel", ""), 1813, Author("Jane Austen", Country("United Kingdom")))) + ) + .in(header[AuthToken]("X-Auth-Token").description("The token is 'secret'")) + + // Re-usable parameter description + private val limitParameter = query[Option[Int]]("limit").description("Maximum number of books to retrieve") + + val booksListing: PublicEndpoint[Limit, String, Vector[Book], Any] = baseEndpoint.get + .in("list" / "all") + .in(limitParameter) + .out(jsonBody[Vector[Book]]) + + val booksListingByGenre: PublicEndpoint[BooksQuery, String, Vector[Book], Any] = baseEndpoint.get + .in(("list" / path[String]("genre").map(Option(_))(_.get)).and(limitParameter).mapTo[BooksQuery]) + .out(jsonBody[Vector[Book]]) + } + // + + object Library { + import java.util.concurrent.atomic.AtomicReference + + val Books = new AtomicReference( + Vector( + Book( + "The Sorrows of Young Werther", + Genre("Novel", "Novel is genre"), + 1774, + Author("Johann Wolfgang von Goethe", Country("Germany")) + ), + Book("Iliad", Genre("Poetry", ""), -8000, Author("Homer", Country("Greece"))), + Book("Nad Niemnem", Genre("Novel", ""), 1888, Author("Eliza Orzeszkowa", Country("Poland"))), + Book("The Colour of Magic", Genre("Fantasy", ""), 1983, Author("Terry Pratchett", Country("United Kingdom"))), + Book("The Art of Computer Programming", Genre("Non-fiction", ""), 1968, Author("Donald Knuth", Country("USA"))), + Book("Pharaoh", Genre("Novel", ""), 1897, Author("Boleslaw Prus", Country("Poland"))) + ) + ) + + def getBooks(query: BooksQuery): Vector[Book] = { + val allBooks = Books.get() + val limitedBooks = query.limit match { + case None => allBooks + case Some(l) => allBooks.take(l) + } + val filteredBooks = query.genre match { + case None => limitedBooks + case Some(g) => limitedBooks.filter(_.genre.name.equalsIgnoreCase(g)) + } + filteredBooks + } + } + + // + + import Endpoints.* + import sttp.tapir.server.ServerEndpoint + import scala.concurrent.Future + import scala.concurrent.ExecutionContext.Implicits.global + + def booksServerEndpoints: List[ServerEndpoint[Any, Future]] = { + + def bookAddLogic(book: Book, token: AuthToken): Future[Either[String, Unit]] = + Future { + if (token != "secret") { + logger.warn(s"Tried to access with token: $token") + Left("Unauthorized access!!!11") + } else { + logger.info(s"Adding book $book") + Library.Books.getAndUpdate(books => books :+ book) + Right(()) + } + } + + def bookListingLogic(limit: Limit): Future[Either[String, Vector[Book]]] = + Future { + Right[String, Vector[Book]](Library.getBooks(BooksQuery(None, limit))) + } + + def bookListingByGenreLogic(query: BooksQuery): Future[Either[String, Vector[Book]]] = + Future { + Right[String, Vector[Book]](Library.getBooks(query)) + } + + // interpreting the endpoint description and converting it to an akka-http route, providing the logic which + // should be run when the endpoint is invoked. 
+    List(
+      addBook.serverLogic((bookAddLogic _).tupled),
+      booksListing.serverLogic(bookListingLogic),
+      booksListingByGenre.serverLogic(bookListingByGenreLogic)
+    )
+  }
+
+  def swaggerUIServerEndpoints: List[ServerEndpoint[Any, Future]] = {
+    import sttp.tapir.swagger.bundle.SwaggerInterpreter
+
+    // interpreting the endpoint descriptions as openapi yaml documentation,
+    // and exposing the docs using SwaggerUI endpoints
+    SwaggerInterpreter().fromEndpoints(List(addBook), "The Tapir Library", "1.0")
+  }
+
+  def makeClientRequest(): Unit = {
+    import sttp.client3.*
+    import sttp.tapir.client.sttp.SttpClientInterpreter
+    val client = SttpClientInterpreter().toQuickClient(booksListing, Some(uri"http://$declaredHost:$declaredPort"))
+
+    val result: Either[String, Vector[Book]] = client(Some(3))
+    logger.info("Result of listing request with limit 3: " + result)
+  }
+
+  logger.info("Welcome to the Tapir Library example!")
+
+  logger.info("Starting the server ...")
+
+  // Starting the netty server
+  val serverBinding: NettyFutureServerBinding =
+    Await.result(
+      NettyFutureServer()
+        .port(declaredPort)
+        .host(declaredHost)
+        .addEndpoints(booksServerEndpoints ++ swaggerUIServerEndpoints)
+        .start(),
+      Duration.Inf
+    )
+
+  // The server is now bound and accepts incoming connections
+  logger.info(s"Server started at http://${serverBinding.hostName}:${serverBinding.port}")
+
+  logger.info("Making a request to the listing endpoint ...")
+  makeClientRequest()
+
+  logger.info(s"Try out the API by opening the Swagger UI: http://$declaredHost:$declaredPort/docs")
+  logger.info("Press ENTER to stop the server...")
+  scala.io.StdIn.readLine
+  Await.result(serverBinding.stop(), Duration.Inf)
+}
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/CreateDerivedEnumerationPickler.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/CreateDerivedEnumerationPickler.scala
new file mode 100644
index 0000000000..cbb75c4bb3
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/CreateDerivedEnumerationPickler.scala
@@ -0,0 +1,59 @@
+package sttp.tapir.json.pickler
+
+import sttp.tapir.generic.Configuration
+import sttp.tapir.macros.CreateDerivedEnumerationSchema
+import sttp.tapir.{Schema, SchemaAnnotations, SchemaType, Validator}
+
+import scala.deriving.Mirror
+import scala.reflect.ClassTag
+
+/** A builder for deriving a [[Pickler]] for an enum, used by [[Pickler.derivedEnumeration]]. Can be used to set non-standard encoding
+  * logic, the schema type, or a default value for the enum.
+  */
+class CreateDerivedEnumerationPickler[T: ClassTag](
+    validator: Validator.Enumeration[T],
+    schemaAnnotations: SchemaAnnotations[T]
+):
+
+  /** @param encode
+    *   Specify how values of this type can be encoded to a raw value (typically a [[String]]; the raw form should correspond with
+    *   `schemaType`). This encoding will be used when writing/reading JSON and generating documentation. Defaults to an identity
+    *   function, which effectively means that `.toString` will be used to represent the enumeration in the docs.
+    * @param schemaType
+    *   The low-level representation of the enumeration. Defaults to a string.
+ */ + inline def apply( + encode: Option[T => Any] = Some(identity), + schemaType: SchemaType[T] = SchemaType.SString[T](), + default: Option[T] = None + )(using m: Mirror.Of[T]): Pickler[T] = { + val schema: Schema[T] = new CreateDerivedEnumerationSchema(validator, schemaAnnotations).apply( + encode, + schemaType, + default + ) + given Configuration = Configuration.default + given SubtypeDiscriminator[T] = EnumValueDiscriminator[T]( + encode.map(_.andThen(_.toString)).getOrElse(_.toString), + validator + ) + lazy val childPicklers: Tuple.Map[m.MirroredElemTypes, Pickler] = Pickler.summonChildPicklerInstances[T, m.MirroredElemTypes] + Pickler.picklerSum(schema, childPicklers) + } + + /** Creates the Pickler assuming the low-level representation is a `String`. The encoding function passes the object unchanged (which + * means `.toString` will be used to represent the enumeration in JSON and documentation). Typically you don't need to explicitly use + * `Pickler.derivedEnumeration[T].defaultStringBased`, as this is the default behavior of [[Pickler.derived]] for enums. + */ + inline def defaultStringBased(using Mirror.Of[T]) = apply() + + /** Creates the Pickler assuming the low-level representation is a `String`. Provide your custom encoding function for representing an + * enum value as a String. It will be used to represent the enumeration in JSON and documentation. This approach is recommended if you + * need to encode enums using a common field in their base trait, or another specific logic for extracting string representation. + */ + inline def customStringBased(encode: T => String)(using Mirror.Of[T]): Pickler[T] = + apply( + Some(encode), + schemaType = SchemaType.SString[T](), + default = None + ) diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/Pickler.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/Pickler.scala new file mode 100644 index 0000000000..b87588753c --- /dev/null +++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/Pickler.scala @@ -0,0 +1,435 @@ +package sttp.tapir.json.pickler + +import sttp.tapir.Codec.JsonCodec +import sttp.tapir.DecodeResult.Error.JsonDecodeException +import sttp.tapir.DecodeResult.{Error, Value} +import sttp.tapir.SchemaType.SProduct +import sttp.tapir.generic.Configuration +import sttp.tapir.{Codec, Schema, SchemaAnnotations, Validator} + +import scala.collection.Factory +import scala.compiletime.* +import scala.deriving.Mirror +import scala.quoted.* +import scala.reflect.ClassTag +import scala.util.{Failure, NotGiven, Success, Try} +import java.math.{BigDecimal as JBigDecimal, BigInteger as JBigInteger} +import macros.* + +import scala.annotation.implicitNotFound + +object Pickler: + + /** Derive a [[Pickler]] instance for the given type, at compile-time. Depending on the derivation mode (auto / semi-auto), picklers for + * referenced types (e.g. via a field, enum case or subtype) will either be derived automatically, or will need to be provided manually. + * + * This method can either be used explicitly, in the definition of a `given`, or indirectly by adding a `... derives Pickler` modifier to + * a datatype definition. + * + * The in-scope [[Configuration]] instance is used to customise field names and other behavior. + */ + inline def derived[T: ClassTag](using Configuration, Mirror.Of[T]): Pickler[T] = + summonFrom { + case schema: Schema[T] => fromExistingSchemaAndRw[T](schema) + case _ => buildNewPickler[T]() + } + + /** Create a coproduct pickler (e.g. 
+    * from a field of the base type. The field, if not yet present, is added to each child schema.
+    *
+    * The picklers for the child types have to be provided explicitly with their value mappings in `mapping`.
+    *
+    * Note that if the discriminator value is some transformation of the child's type name (obtained using the implicit
+    * [[Configuration]]), the coproduct schema can be derived automatically or semi-automatically.
+    *
+    * @param discriminatorPickler
+    *   The pickler that is used when adding the discriminator as a field to child picklers (if it's not yet added).
+    */
+  inline def oneOfUsingField[T: ClassTag, V](extractor: T => V, asString: V => String)(
+      mapping: (V, Pickler[_ <: T])*
+  )(using m: Mirror.Of[T], c: Configuration, discriminatorPickler: Pickler[V]): Pickler[T] =
+
+    val paramExtractor = extractor
+    val paramAsString = asString
+    val paramMapping = mapping
+    type ParamV = V
+    given subtypeDiscriminator: SubtypeDiscriminator[T] = new CustomSubtypeDiscriminator[T] {
+      type V = ParamV
+      override def extractor = paramExtractor
+      override def asString = paramAsString
+      override lazy val mapping = paramMapping
+    }
+    summonFrom {
+      case schema: Schema[T] => fromExistingSchemaAndRw[T](schema)
+      case _ =>
+        inline m match {
+          case p: Mirror.ProductOf[T] =>
+            error(
+              s"Unexpected product type (case class) ${implicitly[ClassTag[T]].runtimeClass.getSimpleName()}, this method should only be used with sum types (like a sealed hierarchy)"
+            )
+          case _: Mirror.SumOf[T] =>
+            inline if (isScalaEnum[T])
+              error("oneOfUsingField cannot be used with enums. Try Pickler.derivedEnumeration instead.")
+            else {
+              given schemaV: Schema[V] = discriminatorPickler.schema
+              val schema: Schema[T] = Schema.oneOfUsingField[T, V](extractor, asString)(
+                mapping.toList.map { case (v, p) =>
+                  (v, p.schema)
+                }: _*
+              )
+              lazy val childPicklers: Tuple.Map[m.MirroredElemTypes, Pickler] = summonChildPicklerInstances[T, m.MirroredElemTypes]
+              picklerSum(schema, childPicklers)
+            }
+        }
+    }
+
+  /** Creates a pickler for an enumeration, where the validator is derived using [[sttp.tapir.Validator.derivedEnumeration]]. This
+    * requires that this is an `enum`, where all cases are parameterless, or that all subtypes of the sealed hierarchy `T` are `object`s.
+    *
+    * This method cannot be a `given`, as there's no way to constrain the type `T` to be an enum / sealed trait or class enumeration, so
+    * that this would be invoked only when necessary.
+    */
+  inline def derivedEnumeration[T: ClassTag](using Mirror.Of[T]): CreateDerivedEnumerationPickler[T] =
+    inline erasedValue[T] match
+      case _: Null =>
+        error("Unexpected non-enum Null passed to derivedEnumeration")
+      case _: Nothing =>
+        error("Unexpected non-enum Nothing passed to derivedEnumeration")
+      case _: reflect.Enum =>
+        new CreateDerivedEnumerationPickler(Validator.derivedEnumeration[T], SchemaAnnotations.derived[T])
+      case _ =>
+        error("Unexpected non-enum type passed to derivedEnumeration")
+
+  inline given nonMirrorPickler[T](using Configuration, NotGiven[Mirror.Of[T]]): Pickler[T] =
+    summonFrom {
+      // It turns out that summoning a Pickler can sometimes fall into this branch, even if we explicitly state that we want a NotGiven
+      // in the method signature
+      case m: Mirror.Of[T] =>
+        errorForType[T]("Failed to summon a Pickler[%s].
Try using Pickler.derived or importing sttp.tapir.json.pickler.generic.auto.*") + case n: NotGiven[Mirror.Of[T]] => + Pickler( + new TapirPickle[T] { + // Relying on given writers and readers provided by uPickle Writers and Readers base traits + // They should take care of deriving for Int, String, Boolean, Option, List, Map, Array, etc. + override lazy val reader = summonInline[Reader[T]] + override lazy val writer = summonInline[Writer[T]] + }, + summonInline[Schema[T]] + ) + } + + given picklerForOption[T: Pickler](using Configuration, Mirror.Of[T]): Pickler[Option[T]] = + summon[Pickler[T]].asOption + + given picklerForIterable[T: Pickler, C[X] <: Iterable[X]](using Configuration, Mirror.Of[T], Factory[T, C[T]]): Pickler[C[T]] = + summon[Pickler[T]].asIterable[C] + + given picklerForEither[A, B](using pa: Pickler[A], pb: Pickler[B]): Pickler[Either[A, B]] = + given Schema[A] = pa.schema + given Schema[B] = pb.schema + val newSchema = summon[Schema[Either[A, B]]] + + new Pickler[Either[A, B]]( + new TapirPickle[Either[A, B]] { + given Reader[A] = pa.innerUpickle.reader.asInstanceOf[Reader[A]] + given Writer[A] = pa.innerUpickle.writer.asInstanceOf[Writer[A]] + given Reader[B] = pb.innerUpickle.reader.asInstanceOf[Reader[B]] + given Writer[B] = pb.innerUpickle.writer.asInstanceOf[Writer[B]] + override lazy val writer = summon[Writer[Either[A, B]]] + override lazy val reader = summon[Reader[Either[A, B]]] + }, + newSchema + ) + + given picklerForArray[T: Pickler: ClassTag]: Pickler[Array[T]] = + summon[Pickler[T]].asArray + + inline given picklerForStringMap[V](using pv: Pickler[V]): Pickler[Map[String, V]] = + given Schema[V] = pv.schema + val newSchema = Schema.schemaForMap[V] + new Pickler[Map[String, V]]( + new TapirPickle[Map[String, V]] { + given Reader[V] = pv.innerUpickle.reader.asInstanceOf[Reader[V]] + given Writer[V] = pv.innerUpickle.writer.asInstanceOf[Writer[V]] + override lazy val writer = summon[Writer[Map[String, V]]] + override lazy val reader = summon[Reader[Map[String, V]]] + }, + newSchema + ) + + /** Create a pickler for a map with arbitrary keys. The pickler for the keys (`Pickler[K]`) should be string-like (that is, the schema + * type should be [[sttp.tapir.SchemaType.SString]]), however this cannot be verified at compile-time and is not verified at run-time. + * + * The given `keyToString` conversion function is used during validation. 
+ * + * If you'd like this pickler to be available as a given type of keys, create an custom implicit, e.g.: + * + * {{{ + * case class MyKey(value: String) extends AnyVal + * given picklerForMyMap: Pickler[Map[MyKey, MyValue]] = Pickler.picklerForMap[MyKey, MyValue](_.value) + * }}} + */ + inline def picklerForMap[K, V](keyToString: K => String)(using pk: Pickler[K], pv: Pickler[V]): Pickler[Map[K, V]] = + given Schema[V] = pv.schema + val newSchema = Schema.schemaForMap[K, V](keyToString) + new Pickler[Map[K, V]]( + new TapirPickle[Map[K, V]] { + given Reader[K] = pk.innerUpickle.reader.asInstanceOf[Reader[K]] + given Writer[K] = pk.innerUpickle.writer.asInstanceOf[Writer[K]] + given Reader[V] = pv.innerUpickle.reader.asInstanceOf[Reader[V]] + given Writer[V] = pv.innerUpickle.writer.asInstanceOf[Writer[V]] + override lazy val writer = summon[Writer[Map[K, V]]] + override lazy val reader = summon[Reader[Map[K, V]]] + }, + newSchema + ) + + given Pickler[JBigDecimal] = new Pickler[JBigDecimal]( + new TapirPickle[JBigDecimal] { + override lazy val writer = summon[Writer[BigDecimal]].comap(jBd => BigDecimal(jBd)) + override lazy val reader = summon[Reader[BigDecimal]].map(bd => bd.bigDecimal) + }, + summon[Schema[JBigDecimal]] + ) + + given Pickler[JBigInteger] = new Pickler[JBigInteger]( + new TapirPickle[JBigInteger] { + override lazy val writer = summon[Writer[BigInt]].comap(jBi => BigInt(jBi)) + override lazy val reader = summon[Reader[BigInt]].map(bi => bi.bigInteger) + }, + summon[Schema[JBigInteger]] + ) + + inline given picklerForAnyVal[T <: AnyVal]: Pickler[T] = ${ picklerForAnyValImpl[T] } + + // + + private inline def errorForType[T](inline template: String): Null = ${ errorForTypeImpl[T]('template) } + + private def errorForTypeImpl[T: Type](template: Expr[String])(using Quotes): Expr[Null] = { + import quotes.reflect.* + val templateStr = template.valueOrAbort + val typeName = TypeRepr.of[T].show + report.error(String.format(templateStr, typeName)) + '{ null } + } + + private def picklerForAnyValImpl[T: Type](using quotes: Quotes): Expr[Pickler[T]] = + import quotes.reflect.* + val tpe = TypeRepr.of[T] + + val isValueCaseClass = + tpe.typeSymbol.isClassDef && tpe.classSymbol.get.flags.is(Flags.Case) && tpe.baseClasses.contains(Symbol.classSymbol("scala.AnyVal")) + + if (!isValueCaseClass) { + '{ nonMirrorPickler[T] } + } else { + + val field = tpe.typeSymbol.declaredFields.head + val fieldTpe = tpe.memberType(field) + fieldTpe.asType match + case '[f] => + val basePickler = Expr.summon[Pickler[f]].getOrElse { + report.errorAndAbort( + s"Cannot summon Pickler for value class ${tpe.show}. Missing Pickler[${fieldTpe.show}] in implicit scope." 
+ ) + } + '{ + val newSchema: Schema[T] = ${ basePickler }.schema.as[T] + new Pickler[T]( + new TapirPickle[T] { + override lazy val writer = summonInline[Writer[f]].comap[T]( + // writing object of type T means writing T.field + ccObj => ${ Select.unique(('ccObj).asTerm, field.name).asExprOf[f] } + ) + // a reader of type f (field) will read it and wrap into value object using the consutructor of T + override lazy val reader = summonInline[Reader[f]] + .map[T](fieldObj => ${ Apply(Select.unique(New(Inferred(tpe)), ""), List(('fieldObj).asTerm)).asExprOf[T] }) + }, + newSchema + ) + } + } + + private inline def fromExistingSchemaAndRw[T](schema: Schema[T])(using ClassTag[T], Configuration, Mirror.Of[T]): Pickler[T] = + Pickler( + new TapirPickle[T] { + val rw: ReadWriter[T] = summonFrom { + case foundTapirRW: ReadWriter[T] => + foundTapirRW + case foundUpickleDefaultRW: _root_.upickle.default.ReadWriter[T] => // there is BOTH schema and ReadWriter in scope + foundUpickleDefaultRW.asInstanceOf[ReadWriter[T]] + case _ => + errorForType[T]( + "Found implicit Schema[%s] but couldn't find a uPickle ReadWriter for this type. Either provide a ReadWriter, or remove the Schema from scope and let Pickler derive its own." + ) + null + } + override lazy val reader = rw + override lazy val writer = rw + }, + schema + ) + + private[pickler] inline def buildNewPickler[T: ClassTag]()(using m: Mirror.Of[T], c: Configuration): Pickler[T] = + // The lazy modifier is necessary for preventing infinite recursion in the derived instance for recursive types such as Lst + lazy val childPicklers: Tuple.Map[m.MirroredElemTypes, Pickler] = summonChildPicklerInstances[T, m.MirroredElemTypes] + inline m match { + case p: Mirror.ProductOf[T] => picklerProduct(p, childPicklers) + case _: Mirror.SumOf[T] => + val schema: Schema[T] = + inline if (isScalaEnum[T]) + Schema.derivedEnumeration[T].defaultStringBased + else + Schema.derived[T] + given SubtypeDiscriminator[T] = DefaultSubtypeDiscriminator[T]() + picklerSum(schema, childPicklers) + } + + private[pickler] inline def summonChildPicklerInstances[T: ClassTag, Fields <: Tuple](using + m: Mirror.Of[T], + c: Configuration + ): Tuple.Map[Fields, Pickler] = + inline erasedValue[Fields] match { + case _: (fieldType *: fieldTypesTail) => + val processedHead = deriveOrSummon[T, fieldType] + val processedTail = summonChildPicklerInstances[T, fieldTypesTail] + Tuple.fromArray((processedHead +: processedTail.toArray)).asInstanceOf[Tuple.Map[Fields, Pickler]] + case _: EmptyTuple.type => EmptyTuple.asInstanceOf[Tuple.Map[Fields, Pickler]] + } + + private inline def deriveOrSummon[T, FieldType](using Configuration): Pickler[FieldType] = + inline erasedValue[FieldType] match + case _: T => deriveRec[T, FieldType] + case _ => + summonFrom { + case p: Pickler[FieldType] => p + case _ => + errorForType[FieldType]( + "Failed to summon Pickler[%s]. 
+            )
+        }
+
+  private inline def deriveRec[T, FieldType](using config: Configuration): Pickler[FieldType] =
+    inline erasedValue[T] match
+      case _: FieldType => error("Infinite recursive derivation")
+      case _            => Pickler.derived[FieldType](using summonInline[ClassTag[FieldType]], config, summonInline[Mirror.Of[FieldType]])
+
+  // Extracts the child readers/writers from the child picklers and creates a new reader/writer from scratch, using them together with
+  // the fields of the product. The provided, existing schema is used, and its data customizes the new schema.
+  private inline def picklerProduct[T: ClassTag, TFields <: Tuple](
+      product: Mirror.ProductOf[T],
+      childPicklers: => Tuple.Map[TFields, Pickler]
+  )(using
+      config: Configuration
+  ): Pickler[T] =
+    lazy val derivedChildSchemas: Tuple.Map[TFields, Schema] =
+      childPicklers.map([t] => (p: t) => p.asInstanceOf[Pickler[t]].schema).asInstanceOf[Tuple.Map[TFields, Schema]]
+    val schema: Schema[T] = productSchema(derivedChildSchemas)
+    // only now the schema fields are enriched properly
+    val enrichedChildSchemas = schema.schemaType.asInstanceOf[SProduct[T]].fields.map(_.schema)
+    val childDefaults = enrichedChildSchemas.map(_.default.map(_._1))
+
+    val tapirPickle = new TapirPickle[T] {
+      override def tagName = config.discriminator.getOrElse(super.tagName)
+
+      override lazy val writer: Writer[T] =
+        macroProductW[T](
+          schema,
+          childPicklers.map([a] => (obj: a) => obj.asInstanceOf[Pickler[a]].innerUpickle.writer).productIterator.toList,
+          childDefaults
+        )
+      override lazy val reader: Reader[T] =
+        macroProductR[T](schema, childPicklers.map([a] => (obj: a) => obj.asInstanceOf[Pickler[a]].innerUpickle.reader), childDefaults)(
+          using product
+        )
+    }
+    Pickler[T](tapirPickle, schema)
+
+  private inline def productSchema[T, TFields <: Tuple](childSchemas: Tuple.Map[TFields, Schema])(using
+      genericDerivationConfig: Configuration
+  ): Schema[T] =
+    SchemaDerivation.productSchema(genericDerivationConfig, childSchemas)
+
+  private[tapir] inline def picklerSum[T: ClassTag, CP <: Tuple](schema: Schema[T], childPicklers: => CP)(using
+      m: Mirror.Of[T],
+      config: Configuration,
+      subtypeDiscriminator: SubtypeDiscriminator[T] = DefaultSubtypeDiscriminator[T]()
+  ): Pickler[T] =
+    val tapirPickle = new TapirPickle[T] {
+      override def tagName = config.discriminator.getOrElse(super.tagName)
+      override lazy val writer: Writer[T] =
+        macroSumW[T](
+          childPicklers.map([a] => (obj: a) => obj.asInstanceOf[Pickler[a]].innerUpickle.writer).productIterator.toList,
+          subtypeDiscriminator
+        )
+      override lazy val reader: Reader[T] =
+        macroSumR[T](childPicklers.map([a] => (obj: a) => obj.asInstanceOf[Pickler[a]].innerUpickle.reader), subtypeDiscriminator)
+    }
+    new Pickler[T](tapirPickle, schema)
+
+/** A pickler combines the [[Schema]] of a type (which is used for documentation and validation of deserialized values) with a uPickle
+  * encoder/decoder ([[ReadWriter]]). The pickler module can derive both the schema and the uPickle ReadWriters in a single go, using a
+  * common configuration API.
+  *
+  * An in-scope pickler instance is required by [[jsonBody]] (and its variants), but it can also be manually converted to a codec using
+  * [[Pickler.toCodec]].
+  */
+@implicitNotFound(msg = """Could not summon a Pickler for type ${T}.
+Picklers can be derived automatically by adding: `import sttp.tapir.json.pickler.generic.auto.*`, or manually using `Pickler.derived[T]`.
+The latter is also useful for debugging derivation errors.
+You can find more details in the docs: https://tapir.softwaremill.com/en/latest/endpoint/pickler.html.""")
+case class Pickler[T](innerUpickle: TapirPickle[T], schema: Schema[T]):
+
+  def toCodec: JsonCodec[T] =
+    import innerUpickle._
+    given innerUpickle.Reader[T] = innerUpickle.reader
+    given innerUpickle.Writer[T] = innerUpickle.writer
+    given schemaT: Schema[T] = schema
+    Codec.json[T] { s =>
+      Try(read[T](s)) match {
+        case Success(v) => Value(v)
+        case Failure(e) => Error(s, JsonDecodeException(errors = List.empty, e))
+      }
+    } { t => write(t) }
+
+  def asOption: Pickler[Option[T]] =
+    val newSchema = schema.asOption
+    new Pickler[Option[T]](
+      new TapirPickle[Option[T]] {
+        given Reader[T] = innerUpickle.reader.asInstanceOf[Reader[T]]
+        given Writer[T] = innerUpickle.writer.asInstanceOf[Writer[T]]
+        override lazy val writer = summon[Writer[Option[T]]]
+        override lazy val reader = summon[Reader[Option[T]]]
+      },
+      newSchema
+    )
+
+  def asIterable[C[X] <: Iterable[X]](using Factory[T, C[T]]): Pickler[C[T]] =
+    val newSchema = schema.asIterable[C]
+    new Pickler[C[T]](
+      new TapirPickle[C[T]] {
+        given Reader[T] = innerUpickle.reader.asInstanceOf[Reader[T]]
+        given Writer[T] = innerUpickle.writer.asInstanceOf[Writer[T]]
+        override lazy val writer = summon[Writer[C[T]]]
+        override lazy val reader = summon[Reader[C[T]]]
+      },
+      newSchema
+    )
+
+  def asArray(using ct: ClassTag[T]): Pickler[Array[T]] =
+    val newSchema = schema.asArray
+    new Pickler[Array[T]](
+      new TapirPickle[Array[T]] {
+        given Reader[T] = innerUpickle.reader.asInstanceOf[Reader[T]]
+        given Writer[T] = innerUpickle.writer.asInstanceOf[Writer[T]]
+        override lazy val writer = summon[Writer[Array[T]]]
+        override lazy val reader = summon[Reader[Array[T]]]
+      },
+      newSchema
+    )
+
+given picklerToCodec[T](using p: Pickler[T]): JsonCodec[T] = p.toCodec
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/Readers.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/Readers.scala
new file mode 100644
index 0000000000..9ba80ef6bd
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/Readers.scala
@@ -0,0 +1,82 @@
+package sttp.tapir.json.pickler
+
+import _root_.upickle.implicits.{ReadersVersionSpecific, macros => upickleMacros}
+import sttp.tapir.{Schema, SchemaType}
+
+import scala.deriving.Mirror
+import scala.reflect.ClassTag
+
+/** A modification of upickle.implicits.Readers, implemented in order to provide our custom JSON decoding and typeclass derivation logic:
+  *
+  *   1. A CaseClassReader[T] is built based on readers for child fields passed as an argument, instead of just summoning these readers.
+  *      This allows us to operate on Picklers and use readers extracted from these Picklers. Summoning is now done on the Pickler, not
+  *      the Reader level.
+  *   1. Default values can be passed as parameters, which are read from Schema annotations if present. Vanilla uPickle reads defaults
+  *      only from case class defaults.
+  *   1. The subtype discriminator can be passed as a parameter, allowing a custom key to be specified for the discriminator field, as
+  *      well as a function for extracting the discriminator value.
+  *   1. The schema is passed as a parameter, so that we can use its encodedName to transform field keys.
+  *   1. The configuration can be used for setting the discriminator field name, or decoding all field names according to a custom
+  *      function (allowing transformations like snake_case, etc.)
+ */ +private[pickler] trait Readers extends ReadersVersionSpecific with UpickleHelpers { + + case class LeafWrapper[T](leaf: TaggedReader.Leaf[T], r: Reader[T], leafTagValue: String) extends TaggedReader[T] { + override def findReader(s: String) = if (s == leafTagValue) r else null + } + + override def annotate[V](rw: Reader[V], n: String) = { + LeafWrapper(new TaggedReader.Leaf[V](n, rw), rw, n) + } + + inline def macroProductR[T](schema: Schema[T], childReaders: Tuple, childDefaults: List[Option[Any]])(using + m: Mirror.ProductOf[T] + ): Reader[T] = + val schemaFields = schema.schemaType.asInstanceOf[SchemaType.SProduct[T]].fields + + val reader = new CaseClassReadereader[T](upickleMacros.paramsCount[T], upickleMacros.checkErrorMissingKeysCount[T]()) { + override def visitors0 = childReaders + override def fromProduct(p: Product): T = m.fromProduct(p) + override def keyToIndex(x: String): Int = + schemaFields.indexWhere(_.name.encodedName == x) + + override def allKeysArray = schemaFields.map(_.name.encodedName).toArray + override def storeDefaults(x: _root_.upickle.implicits.BaseCaseObjectContext): Unit = { + macros.storeDefaultsTapir[T](x, childDefaults) + } + } + + inline if upickleMacros.isSingleton[T] then annotate[T](SingletonReader[T](upickleMacros.getSingleton[T]), upickleMacros.tagName[T]) + else if upickleMacros.isMemberOfSealedHierarchy[T] then annotate[T](reader, upickleMacros.tagName[T]) + else reader + + inline def macroSumR[T](derivedChildReaders: Tuple, subtypeDiscriminator: SubtypeDiscriminator[T]): Reader[T] = + implicit val currentlyDeriving: _root_.upickle.core.CurrentlyDeriving[T] = new _root_.upickle.core.CurrentlyDeriving() + subtypeDiscriminator match { + case discriminator: CustomSubtypeDiscriminator[T] => + // This part ensures that child product readers are replaced with product readers with proper "tag value". + // This value is used by uPickle internals to find a matching reader for given discriminator value. + // Originally product readers have this value set to class name when they are derived individually, + // so we need to 'fix' them here using discriminator settings. 
+ val readersFromMapping = discriminator.mapping + .map { case (k, v) => (k, v.innerUpickle.reader) } + .map { + case (k, leaf) if leaf.isInstanceOf[LeafWrapper[_]] => + TaggedReader.Leaf[T](discriminator.asString(k), leaf.asInstanceOf[LeafWrapper[_]].r.asInstanceOf[Reader[T]]) + case (_, otherKindOfReader) => + otherKindOfReader + } + + new TaggedReader.Node[T](readersFromMapping.asInstanceOf[Seq[TaggedReader[T]]]: _*) + case discriminator: EnumValueDiscriminator[T] => + val readersForPossibleValues: Seq[TaggedReader[T]] = + discriminator.validator.possibleValues.zip(derivedChildReaders.toList).map { case (enumValue, reader) => + TaggedReader.Leaf[T](discriminator.encode(enumValue), reader.asInstanceOf[LeafWrapper[_]].r.asInstanceOf[Reader[T]]) + } + new TaggedReader.Node[T](readersForPossibleValues: _*) + + case _: DefaultSubtypeDiscriminator[T] => + val readers = derivedChildReaders.toList.asInstanceOf[List[TaggedReader[T]]] + Reader.merge(readers: _*) + } +} diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/SchemaDerivation.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/SchemaDerivation.scala new file mode 100644 index 0000000000..653783fda3 --- /dev/null +++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/SchemaDerivation.scala @@ -0,0 +1,219 @@ +package sttp.tapir.json.pickler + +import sttp.tapir.SchemaType.{SProduct, SProductField, SRef} +import sttp.tapir.generic.Configuration +import sttp.tapir.{FieldName, Schema, SchemaType} + +import java.util.concurrent.ConcurrentHashMap +import scala.jdk.CollectionConverters.ConcurrentMapHasAsScala +import scala.quoted.* +import scala.reflect.ClassTag +import sttp.tapir.Validator + +private[pickler] object SchemaDerivation: + private[pickler] val deriveInProgress: scala.collection.mutable.Map[String, Unit] = new ConcurrentHashMap[String, Unit]().asScala + + inline def productSchema[T, TFields <: Tuple]( + genericDerivationConfig: Configuration, + childSchemas: Tuple.Map[TFields, Schema] + ): Schema[T] = + ${ productSchemaImpl('genericDerivationConfig, 'childSchemas) } + + def productSchemaImpl[T: Type, TFields <: Tuple]( + genericDerivationConfig: Expr[Configuration], + childSchemas: Expr[Tuple.Map[TFields, Schema]] + )(using Quotes, Type[TFields]): Expr[Schema[T]] = + new SchemaDerivation(genericDerivationConfig).productSchemaImpl(childSchemas) + +private class SchemaDerivation(genericDerivationConfig: Expr[Configuration])(using Quotes): + + import quotes.reflect.* + + private def productSchemaImpl[T: Type, TFields <: Tuple]( + childSchemas: Expr[Tuple.Map[TFields, Schema]] + )(using Quotes, Type[TFields]): Expr[Schema[T]] = + val tpe = TypeRepr.of[T] + val typeInfo = TypeInfo.forType(tpe) + val annotations = Annotations.onType(tpe) + '{ Schema[T](schemaType = ${ productSchemaType(childSchemas) }, name = Some(${ typeNameToSchemaName(typeInfo, annotations) })) } + + private def productSchemaType[T: Type, TFields <: Tuple]( + childSchemas: Expr[Tuple.Map[TFields, Schema]] + )(using Quotes, Type[TFields]): Expr[SProduct[T]] = + val tpe: TypeRepr = TypeRepr.of[T] + val fieldsAnnotations = Annotations.onParams(tpe) + val childSchemasArray = '{ $childSchemas.toArray } + '{ + SProduct(${ + Expr.ofList(tpe.typeSymbol.caseFields.zipWithIndex.map { case (fieldSymbol, i) => + val name = Expr(fieldSymbol.name) + + val fieldTpe = tpe.memberType(fieldSymbol) + val fieldAnnotations = fieldsAnnotations.getOrElse(fieldSymbol.name, Annotations.Empty) + + val encodedName = fieldAnnotations.encodedName.getOrElse('{ 
$genericDerivationConfig.toEncodedName($name) }) + + fieldTpe.asType match + case '[f] => + val fieldSchema: Expr[Schema[f]] = '{ $childSchemasArray(${ Expr(i) }).asInstanceOf[Schema[f]] } + val enrichedFieldSchema = enrichSchema(fieldSchema, fieldAnnotations) + + '{ + SProductField( + FieldName($name, $encodedName), + $enrichedFieldSchema, + obj => Some(${ Select('{ obj }.asTerm, fieldSymbol).asExprOf[f] }) + ) + } + }) + }) + } + + // helper methods + + private def summonClassTag[T: Type]: Expr[ClassTag[T]] = Expr.summon[ClassTag[T]] match + case None => report.errorAndAbort(s"Cannot find a ClassTag for ${Type.show[T]}!") + case Some(ct) => ct + + private def summonChildSchema[T: Type]: Expr[Schema[T]] = Expr.summon[Schema[T]] match + case None => report.errorAndAbort(s"Cannot find schema for ${Type.show[T]}!") + case Some(s) => s + + /** To avoid recursive loops, we keep track of the fully qualified names of types for which derivation is in progress using a global + * mutable Set. + */ + private def withCache[T: Type](typeInfo: TypeInfo, annotations: Annotations)(f: => Expr[Schema[T]]): Expr[Schema[T]] = + import SchemaDerivation.deriveInProgress + val cacheKey = typeInfo.full + if deriveInProgress.contains(cacheKey) then '{ Schema[T](SRef(${ typeNameToSchemaName(typeInfo, annotations) })) } + else + try + deriveInProgress.put(cacheKey, ()) + val schema = f + schema + finally deriveInProgress.remove(cacheKey) + + private def typeNameToSchemaName(typeInfo: TypeInfo, annotations: Annotations): Expr[Schema.SName] = + val encodedName: Option[Expr[String]] = annotations.encodedName + + encodedName match + case None => + def allTypeArguments(tn: TypeInfo): Seq[TypeInfo] = tn.typeParams.toList.flatMap(tn2 => tn2 +: allTypeArguments(tn2)) + '{ Schema.SName(${ Expr(typeInfo.full) }, ${ Expr.ofList(allTypeArguments(typeInfo).map(_.short).toList.map(Expr(_))) }) } + case Some(en) => + '{ Schema.SName($en, Nil) } + + private def enrichSchema[X: Type](schema: Expr[Schema[X]], annotations: Annotations): Expr[Schema[X]] = + annotations.all.foldLeft(schema) { (schema, annTerm) => + annTerm.asExpr match + case '{ $ann: Schema.annotations.description } => '{ $schema.description($ann.text) } + case '{ $ann: Schema.annotations.encodedExample } => '{ $schema.encodedExample($ann.example) } + case '{ $ann: Schema.annotations.default[? <: X] } => '{ $schema.default($ann.default, $ann.encoded) } + case '{ $ann: Schema.annotations.validate[X] } => '{ $schema.validate($ann.v) } + case '{ $ann: Schema.annotations.validateEach[?] 
} => + '{ $schema.modifyUnsafe[X](Schema.ModifyCollectionElements)((_: Schema[X]).validate($ann.v.asInstanceOf[Validator[X]])) } + case '{ $ann: Schema.annotations.format } => '{ $schema.format($ann.format) } + case '{ $ann: Schema.annotations.deprecated } => '{ $schema.deprecated(true) } + case '{ $ann: Schema.annotations.customise } => '{ $ann.f($schema).asInstanceOf[Schema[X]] } + case _ => schema + } + + // helper classes + + private case class TypeInfo(owner: String, short: String, typeParams: Iterable[TypeInfo]): + def full: String = s"$owner.$short" + + private object TypeInfo: + def forType(tpe: TypeRepr): TypeInfo = + def normalizedName(s: Symbol): String = + if s.flags.is(Flags.Module) then s.name.stripSuffix("$") else s.name + def name(tpe: TypeRepr): String = tpe match + case TermRef(typeRepr, name) if tpe.typeSymbol.flags.is(Flags.Module) => name.stripSuffix("$") + case TermRef(typeRepr, name) => name + case _ => normalizedName(tpe.typeSymbol) + + def ownerNameChain(sym: Symbol): List[String] = + if sym.isNoSymbol then List.empty + else if sym == defn.EmptyPackageClass then List.empty + else if sym == defn.RootPackage then List.empty + else if sym == defn.RootClass then List.empty + else ownerNameChain(sym.owner) :+ normalizedName(sym) + + def owner(tpe: TypeRepr): String = ownerNameChain(tpe.typeSymbol.maybeOwner).mkString(".") + + tpe match + case AppliedType(tpe, args) => TypeInfo(owner(tpe), name(tpe), args.map(forType)) + case _ => TypeInfo(owner(tpe), name(tpe), Nil) + + // + private class Annotations(topLevel: List[Term], inherited: List[Term]): + lazy val all: List[Term] = + // skip inherited annotations if defined at the top-level + topLevel ++ inherited.filterNot(i => topLevel.exists(t => t.tpe <:< i.tpe)) + + def encodedName: Option[Expr[String]] = all + .map(_.asExpr) + .collectFirst { case '{ $en: Schema.annotations.encodedName } => en } + .map(en => '{ $en.name }) + + private object Annotations: + val Empty: Annotations = Annotations(Nil, Nil) + + def onType(tpe: TypeRepr): Annotations = + val topLevel: List[Term] = tpe.typeSymbol.annotations.filter(filterAnnotation) + val inherited: List[Term] = + tpe.baseClasses + .filterNot(isObjectOrScala) + .collect { + case s if s != tpe.typeSymbol => s.annotations + } // skip self + .flatten + .filter(filterAnnotation) + Annotations(topLevel, inherited) + + def onParams(tpe: TypeRepr): Map[String, Annotations] = + def paramAnns: List[(String, List[Term])] = groupByParamName { + (fromConstructor(tpe.typeSymbol) ++ fromDeclarations(tpe.typeSymbol)) + .filter { case (_, anns) => anns.nonEmpty } + } + + def inheritedParamAnns: List[(String, List[Term])] = + groupByParamName { + tpe.baseClasses + .filterNot(isObjectOrScala) + .collect { + case s if s != tpe.typeSymbol => + (fromConstructor(s) ++ fromDeclarations(s)).filter { case (_, anns) => + anns.nonEmpty + } + } + .flatten + } + + def fromConstructor(from: Symbol): List[(String, List[Term])] = + from.primaryConstructor.paramSymss.flatten.map { field => field.name -> field.annotations.filter(filterAnnotation) } + + def fromDeclarations(from: Symbol): List[(String, List[Term])] = + from.declarations.collect { + // using TypeTest + case field: Symbol if (field.tree match { case _: ValDef => true; case _ => false }) => + field.name -> field.annotations.filter(filterAnnotation) + } + + def groupByParamName(anns: List[(String, List[Term])]) = + anns + .groupBy { case (name, _) => name } + .toList + .map { case (name, l) => name -> l.flatMap(_._2) } + + val topLevel = 
paramAnns.toMap
+      val inherited = inheritedParamAnns.toMap
+      val params = topLevel.keySet ++ inherited.keySet
+      params.map(p => p -> Annotations(topLevel.getOrElse(p, Nil), inherited.getOrElse(p, Nil))).toMap
+
+    private def isObjectOrScala(bc: Symbol) =
+      bc.name.contains("java.lang.Object") || bc.fullName.startsWith("scala.")
+
+    private def filterAnnotation(a: Term): Boolean =
+      a.tpe.typeSymbol.maybeOwner.isNoSymbol ||
+        a.tpe.typeSymbol.owner.fullName != "scala.annotation.internal"
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/SubtypeDiscriminator.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/SubtypeDiscriminator.scala
new file mode 100644
index 0000000000..86cf90bcde
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/SubtypeDiscriminator.scala
@@ -0,0 +1,27 @@
+package sttp.tapir.json.pickler
+
+import sttp.tapir.Validator
+
+private[pickler] sealed trait SubtypeDiscriminator[T]
+
+/** Describes non-standard encoding/decoding for subtypes in sealed hierarchies. Allows specifying an extractor function, for example to
+  * read the subtype discriminator from a field. Also requires a mapping in the opposite direction, specifying how particular
+  * discriminator values are read into concrete subtype picklers.
+  */
+private[pickler] trait CustomSubtypeDiscriminator[T] extends SubtypeDiscriminator[T]:
+  type V
+  def extractor: T => V
+  def asString: V => String
+  def write(t: T): String = asString(extractor(t))
+  def mapping: Seq[(V, Pickler[_ <: T])]
+
+  // to integrate with uPickle, where at some point all we have is Any
+  def writeUnsafe(t: Any): String = asString(extractor(t.asInstanceOf[T]))
+
+/** Describes non-standard encoding/decoding and validation for enums. Allows specifying an encoder function which transforms an enum
+  * value to a String for serialization (for example by referring to a field in the enum's base trait, or by calling `.ordinal.toString`
+  * for a numeric representation).
+  */
+private[pickler] case class EnumValueDiscriminator[T](encode: T => String, validator: Validator.Enumeration[T])
+    extends SubtypeDiscriminator[T]
+
+private[pickler] case class DefaultSubtypeDiscriminator[T]() extends SubtypeDiscriminator[T]
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/TapirPickle.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/TapirPickle.scala
new file mode 100644
index 0000000000..c45ee95ca4
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/TapirPickle.scala
@@ -0,0 +1,24 @@
+package sttp.tapir.json.pickler
+
+import _root_.upickle.AttributeTagged
+
+/** Our custom modification of the uPickle encoding/decoding logic. The standard way to use uPickle is to import the `upickle.default`
+  * object, which allows generating Reader[T]/Writer[T] instances. We provide our own implementation with the same API as
+  * `upickle.default`, but with modified logic, which can be found in the Readers and Writers traits.
+  */
+trait TapirPickle[T] extends AttributeTagged with Readers with Writers:
+  def reader: this.Reader[T]
+  def writer: this.Writer[T]
+
+  // This ensures that None is encoded as null instead of an empty array
+  override given OptionWriter[T: Writer]: Writer[Option[T]] =
+    summon[Writer[T]].comapNulls[Option[T]] {
+      case None    => null.asInstanceOf[T]
+      case Some(x) => x
+    }
+
+  // This ensures that null is read as None
+  override given OptionReader[T: Reader]: Reader[Option[T]] =
+    new Reader.Delegate[Any, Option[T]](summon[Reader[T]].map(Some(_))) {
+      override def visitNull(index: Int) = None
+    }
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/UpickleHelpers.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/UpickleHelpers.scala
new file mode 100644
index 0000000000..4a11d64405
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/UpickleHelpers.scala
@@ -0,0 +1,11 @@
+package sttp.tapir.json.pickler
+
+private[pickler] trait UpickleHelpers:
+  def scanChildren[T, V](xs: Seq[T])(f: T => V): V = // copied from uPickle
+    var x: V = null.asInstanceOf[V]
+    val i = xs.iterator
+    while (x == null && i.hasNext) {
+      val t = f(i.next())
+      if (t != null) x = t
+    }
+    x
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/Writers.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/Writers.scala
new file mode 100644
index 0000000000..88720cc9bf
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/Writers.scala
@@ -0,0 +1,103 @@
+package sttp.tapir.json.pickler
+
+import _root_.upickle.core.Annotator.Checker
+import _root_.upickle.core.{ObjVisitor, Visitor, _}
+import _root_.upickle.implicits.{WritersVersionSpecific, macros => upickleMacros}
+import sttp.tapir.Schema
+import sttp.tapir.SchemaType.SProduct
+import sttp.tapir.generic.Configuration
+
+import scala.reflect.ClassTag
+
+/** A modification of upickle.implicits.Writers, implemented in order to provide our custom JSON encoding and typeclass derivation logic:
+  *
+  *   1. A CaseClassWriter[T] is built based on writers for child fields passed as an argument, instead of just summoning these writers.
+  *      This allows us to operate on Picklers and use Writers extracted from these Picklers. Summoning is now done on the Pickler, not
+  *      the Writer, level.
+  *   1. Default values can be passed as parameters, and are read from Schema annotations if present. Vanilla uPickle reads defaults only
+  *      from case class defaults.
+  *   1. A subtype discriminator can be passed as a parameter, allowing specifying a custom key for the discriminator field, as well as a
+  *      function for extracting the discriminator value.
+  *   1. The Schema is passed as a parameter, so that we can use its encodedName to transform field keys.
+  *   1. Configuration can be used for setting the discriminator field name, or for encoding all field names using a custom function
+  *      (allowing transformations like snake_case, etc.)
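+  *
+  * A sketch of the combined effect of points 4 and 5 (illustrative; the `Example` class and values are hypothetical): with snake_case
+  * member names configured, a `fieldA` member is written under the `"field_a"` key, since the schema's encodedName is used instead of
+  * the raw Scala field name:
+  * {{{
+  * case class Example(fieldA: String)
+  * given Configuration = Configuration.default.withSnakeCaseMemberNames
+  * Pickler.derived[Example].toCodec.encode(Example("x")) // {"field_a":"x"}
+  * }}}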
+  */
+private[pickler] trait Writers extends WritersVersionSpecific with UpickleHelpers:
+  inline def macroProductW[T: ClassTag](
+      schema: Schema[T],
+      childWriters: => List[Any],
+      childDefaults: => List[Option[Any]]
+  )(using
+      Configuration
+  ) =
+    lazy val writer = new CaseClassWriter[T] {
+      def length(v: T) = upickleMacros.writeLength[T](outerThis, v)
+
+      val sProduct = schema.schemaType.asInstanceOf[SProduct[T]]
+
+      override def write0[R](out: Visitor[_, R], v: T): R = {
+        if (v == null) out.visitNull(-1)
+        else {
+          val ctx = out.visitObject(length(v), true, -1)
+          macros.writeSnippets[R, T](
+            sProduct,
+            outerThis,
+            this,
+            v,
+            ctx,
+            childWriters,
+            childDefaults
+          )
+          ctx.visitEnd(-1)
+        }
+      }
+
+      def writeToObject[R](ctx: _root_.upickle.core.ObjVisitor[_, R], v: T): Unit =
+        macros.writeSnippets[R, T](
+          sProduct,
+          outerThis,
+          this,
+          v,
+          ctx,
+          childWriters,
+          childDefaults
+        )
+    }
+
+    inline if upickleMacros.isMemberOfSealedHierarchy[T] && !macros.isScalaEnum[T] then
+      annotate[T](
+        writer,
+        upickleMacros.tagName[T],
+        Annotator.Checker.Cls(implicitly[ClassTag[T]].runtimeClass)
+      ) // tagName is responsible for extracting the @tag annotation, i.e. the discriminator value
+    else if upickleMacros.isSingleton[T]
+    then // moved after the isMemberOfSealedHierarchy check, so that case objects in a hierarchy are handled as case classes - with a discriminator, for consistency
+      // enum cases (singletons) are also handled here
+      annotate[T](SingletonWriter[T](null.asInstanceOf[T]), upickleMacros.tagName[T], Annotator.Checker.Val(upickleMacros.getSingleton[T]))
+    else writer
+
+  inline def macroSumW[T: ClassTag](inline childWriters: => List[Any], subtypeDiscriminator: SubtypeDiscriminator[T])(using
+      Configuration
+  ) =
+    implicit val currentlyDeriving: _root_.upickle.core.CurrentlyDeriving[T] = new _root_.upickle.core.CurrentlyDeriving()
+    val writers: List[TaggedWriter[_ <: T]] = childWriters.asInstanceOf[List[TaggedWriter[_ <: T]]]
+
+    new TaggedWriter.Node[T](writers: _*) {
+      override def findWriter(v: Any): (String, ObjectWriter[T]) = {
+        subtypeDiscriminator match {
+          case discriminator: CustomSubtypeDiscriminator[T] =>
+            val (_, w) = super.findWriter(v)
+            val overriddenTag = discriminator.writeUnsafe(v) // here we use our discriminator instead of uPickle's
+            (overriddenTag, w)
+          case discriminator: EnumValueDiscriminator[T] =>
+            val (_, writer) = super.findWriter(v)
+            val overriddenTag = discriminator.encode(v.asInstanceOf[T])
+            (overriddenTag, writer)
+          case _: DefaultSubtypeDiscriminator[T] =>
+            super.findWriter(v)
+        }
+      }
+    }
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/generic.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/generic.scala
new file mode 100644
index 0000000000..55c518bfda
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/generic.scala
@@ -0,0 +1,13 @@
+package sttp.tapir.json.pickler.generic
+
+import scala.reflect.ClassTag
+import scala.deriving.Mirror
+import sttp.tapir.generic.Configuration
+import sttp.tapir.json.pickler.Pickler
+
+/** Import `sttp.tapir.json.pickler.generic.auto.*` for automatic generic pickler derivation. A [[Pickler]] will be derived at the usage
+  * site using [[Pickler.derived]] for each type where a given `Pickler` is not available in the current given/implicit scope.
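+  *
+  * A minimal sketch (the `Inner`/`Outer` classes are hypothetical): with the import in scope, a pickler for a nested structure is
+  * derived in one step, without first providing a `Pickler[Inner]`:
+  * {{{
+  * import sttp.tapir.json.pickler.generic.auto.*
+  *
+  * case class Inner(a: Int)
+  * case class Outer(inner: Inner)
+  *
+  * val pickler: Pickler[Outer] = Pickler.derived[Outer]
+  * }}}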
+ */ +object auto { + inline implicit def picklerForCaseClass[T: ClassTag](implicit m: Mirror.Of[T], c: Configuration): Pickler[T] = Pickler.derived[T] +} diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/macros.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/macros.scala new file mode 100644 index 0000000000..be86af5505 --- /dev/null +++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/macros.scala @@ -0,0 +1,114 @@ +package sttp.tapir.json.pickler + +import _root_.upickle.implicits.* +import _root_.upickle.implicits.{macros => uMacros} +import sttp.tapir.SchemaType +import sttp.tapir.SchemaType.SProduct + +import scala.quoted.* + +import compiletime.* + +/** Macros, mostly copied from uPickle, and modified to allow our customizations like passing writers/readers as parameters, adjusting + * encoding/decoding logic to make it coherent with the schema. + */ +private[pickler] object macros: + type IsInt[A <: Int] = A + + private[pickler] inline def writeSnippets[R, T]( + inline sProduct: SProduct[T], + inline thisOuter: upickle.core.Types with upickle.implicits.MacrosCommon, + inline self: upickle.implicits.CaseClassReadWriters#CaseClassWriter[T], + inline v: T, + inline ctx: _root_.upickle.core.ObjVisitor[_, R], + childWriters: List[Any], + childDefaults: List[Option[Any]] + ): Unit = + ${ writeSnippetsImpl[R, T]('sProduct, 'thisOuter, 'self, 'v, 'ctx, 'childWriters, 'childDefaults) } + + private[pickler] def writeSnippetsImpl[R, T]( + sProduct: Expr[SProduct[T]], + thisOuter: Expr[upickle.core.Types with upickle.implicits.MacrosCommon], + self: Expr[upickle.implicits.CaseClassReadWriters#CaseClassWriter[T]], + v: Expr[T], + ctx: Expr[_root_.upickle.core.ObjVisitor[_, R]], + childWriters: Expr[List[?]], + childDefaults: Expr[List[Option[?]]] + )(using Quotes, Type[T], Type[R]): Expr[Unit] = + + import quotes.reflect.* + Expr.block( + for (((rawLabel, label), i) <- uMacros.fieldLabelsImpl0[T].zipWithIndex) yield { + val tpe0 = TypeRepr.of[T].memberType(rawLabel).asType + tpe0 match + case '[tpe] => + Literal(IntConstant(i)).tpe.asType match + case '[IsInt[index]] => + val encodedName = '{ ${ sProduct }.fields(${ Expr(i) }).name.encodedName } + val select = Select.unique(v.asTerm, rawLabel.name).asExprOf[Any] + '{ + ${ self }.writeSnippetMappedName[R, tpe]( + ${ ctx }, + ${ encodedName }, + ${ childWriters }(${ Expr(i) }), + ${ select } + ) + } + }, + '{ () } + ) + + private[pickler] inline def storeDefaultsTapir[T]( + inline x: upickle.implicits.BaseCaseObjectContext, + defaultsFromSchema: List[Option[Any]] + ): Unit = ${ + storeDefaultsImpl[T]('x, 'defaultsFromSchema) + } + + private[pickler] def storeDefaultsImpl[T](x: Expr[upickle.implicits.BaseCaseObjectContext], defaultsFromSchema: Expr[List[Option[Any]]])( + using + Quotes, + Type[T] + ) = { + import quotes.reflect.* + + val defaults = uMacros.getDefaultParamsImpl0[T] + val statements = uMacros + .fieldLabelsImpl0[T] + .zipWithIndex + .map { case ((rawLabel, label), i) => + Expr.block( + List('{ + // modified uPickle macro - this additional expression looks for defaults in the schema + // and applies them to override defaults from the type definition + ${ defaultsFromSchema }(${ Expr(i) }).foreach { schemaDefaultValue => + ${ x }.storeValueIfNotFound(${ Expr(i) }, schemaDefaultValue) + } + }), + if (defaults.contains(label)) '{ ${ x }.storeValueIfNotFound(${ Expr(i) }, ${ defaults(label) }) } + else '{} + ) + } + + Expr.block(statements, '{}) + } + + transparent inline def isScalaEnum[X]: Boolean = 
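+    // resolved entirely at compile time: an inline match on erasedValue[X] selects a branch based on the static type of X;
+    // for scala.reflect.Enum subtypes we additionally verify that no enum case is a non-singleton class, since such enums
+    // desugar to sealed hierarchies (see allChildrenObjectsOrEnumCases below)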
inline compiletime.erasedValue[X] match
+      case _: Null         => false
+      case _: Nothing      => false
+      case _: reflect.Enum => allChildrenObjectsOrEnumCases[X]
+      case _               => false
+
+  /** Checks whether all children of type T are objects or enum cases, or sealed parents of such. Useful for determining whether an enum
+    * is indeed an enum, or whether it will be desugared to a sealed hierarchy, in which case it's not really an enumeration in the
+    * context of schemas and JSON codecs.
+    */
+  inline def allChildrenObjectsOrEnumCases[T]: Boolean = ${ allChildrenObjectsOrEnumCasesImpl[T] }
+
+  def allChildrenObjectsOrEnumCasesImpl[T: Type](using q: Quotes): Expr[Boolean] =
+    import quotes.reflect.*
+    val tpe = TypeRepr.of[T]
+    val symbol = tpe.typeSymbol
+    Expr(symbol.children.nonEmpty && !symbol.children.exists(c => c.isClassDef && !(c.flags is Flags.Sealed)))
diff --git a/json/pickler/src/main/scala/sttp/tapir/json/pickler/package.scala b/json/pickler/src/main/scala/sttp/tapir/json/pickler/package.scala
new file mode 100644
index 0000000000..5ec48fc710
--- /dev/null
+++ b/json/pickler/src/main/scala/sttp/tapir/json/pickler/package.scala
@@ -0,0 +1,12 @@
+package sttp.tapir.json.pickler
+
+import sttp.tapir._
+
+def jsonBody[T: Pickler]: EndpointIO.Body[String, T] = stringBodyUtf8AnyFormat(summon[Pickler[T]].toCodec)
+
+def jsonBodyWithRaw[T: Pickler]: EndpointIO.Body[String, (String, T)] = stringBodyUtf8AnyFormat(
+  Codec.tupledWithRaw(summon[Pickler[T]].toCodec)
+)
+
+def jsonQuery[T: Pickler](name: String): EndpointInput.Query[T] =
+  queryAnyFormat[T, CodecFormat.Json](name, Codec.jsonQuery(summon[Pickler[T]].toCodec))
diff --git a/json/pickler/src/test/scala/sttp/tapir/json/pickler/Fixtures.scala b/json/pickler/src/test/scala/sttp/tapir/json/pickler/Fixtures.scala
new file mode 100644
index 0000000000..da07832846
--- /dev/null
+++ b/json/pickler/src/test/scala/sttp/tapir/json/pickler/Fixtures.scala
@@ -0,0 +1,81 @@
+package sttp.tapir.json.pickler
+
+import sttp.tapir.Schema.annotations.default
+import sttp.tapir.Schema.annotations.description
+import sttp.tapir.Schema.annotations.encodedName
+
+import java.util.UUID
+
+object Fixtures:
+  enum ColorEnum:
+    case Green, Pink
+
+  case class Book(author: String, title: String) derives Pickler
+  case class BookShelf(books: List[Book]) derives Pickler
+
+  case class Response(color: ColorEnum, description: String)
+
+  enum RichColorEnum(val code: Int):
+    case Cyan extends RichColorEnum(3)
+    case Magenta extends RichColorEnum(18)
+
+  case class RichColorResponse(color: RichColorEnum)
+
+  enum Entity:
+    case Person(first: String, age: Int)
+    case Business(address: String)
+
+  case class ClassWithDefault(@default("field-a-default") fieldA: String, fieldB: String)
+  case class ClassWithScalaDefault(fieldA: String = "field-a-default", fieldB: String)
+  case class ClassWithScalaAndTapirDefault(
+      @default("field-a-tapir-default") fieldA: String = "field-a-scala-default",
+      fieldB: String,
+      fieldC: Int = 55
+  )
+  case class FlatClass(fieldA: Int, fieldB: String)
+  case class TopClass(fieldA: String, fieldB: InnerClass)
+  case class InnerClass(fieldA11: Int)
+
+  case class TopClass2(fieldA: String, fieldB: AnnotatedInnerClass)
+  case class AnnotatedInnerClass(@encodedName("encoded_field-a") fieldA: String, fieldB: String)
+  case class ClassWithDefault2(@default("field-a-default-2") fieldA: String, @default(ErrorTimeout) fieldB: ErrorCode)
+  case class ClassWithDefault3(
+      fieldA: ErrorCode,
+      @description("desc1") @default(InnerCaseClass("def-field", 65))
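+      // if fieldB is missing during decoding, the @default value InnerCaseClass("def-field", 65) should be applied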
fieldB: InnerCaseClass, + fieldC: InnerCaseClass + ) + case class InnerCaseClass(fieldInner: String, @default(4) fieldInnerInt: Int) + case class FlatClassWithOption(fieldA: String, fieldB: Option[Int]) + case class NestedClassWithOption(innerField: Option[FlatClassWithOption]) + + case class FlatClassWithList(fieldA: String, fieldB: List[Int]) + case class NestedClassWithList(innerField: List[FlatClassWithList]) + case class FlatClassWithArray(fieldA: String, fieldB: Array[Int]) + case class NestedClassWithArray(innerField: Array[FlatClassWithArray]) + case class SimpleTestResult(msg: String) + case class ClassWithEither(fieldA: String, fieldB: Either[String, SimpleTestResult]) + case class ClassWithMap(field: Map[String, SimpleTestResult]) + case class ClassWithMapCustomKey(field: Map[UUID, SimpleTestResult]) + case class UserId(value: UUID) extends AnyVal + case class UserName(name: String) extends AnyVal + case class ClassWithValues(id: UserId, name: UserName, age: Int) + sealed trait ErrorCode + + case object ErrorNotFound extends ErrorCode + case object ErrorTimeout extends ErrorCode + case class CustomError(msg: String) extends ErrorCode + + sealed trait Status: + def code: Int + + case class StatusOk(oF: Int) extends Status { + def code = 200 + } + case class StatusBadRequest(bF: Int) extends Status { + def code = 400 + } + + case object StatusInternalError extends Status { + def code = 500 + } + + case class StatusResponse(status: Status) diff --git a/json/pickler/src/test/scala/sttp/tapir/json/pickler/PicklerTest.scala b/json/pickler/src/test/scala/sttp/tapir/json/pickler/PicklerTest.scala new file mode 100644 index 0000000000..727994f908 --- /dev/null +++ b/json/pickler/src/test/scala/sttp/tapir/json/pickler/PicklerTest.scala @@ -0,0 +1,539 @@ +package sttp.tapir.json.pickler + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.tapir.DecodeResult.Value +import sttp.tapir.Schema +import sttp.tapir.generic.Configuration +import sttp.tapir.SchemaType +import sttp.tapir.Schema.annotations.encodedName +import sttp.tapir.Schema.annotations.default +import java.util.UUID + +import Fixtures.* + +class PicklerTest extends AnyFlatSpec with Matchers { + behavior of "Pickler derivation" + + it should "build from an existing Schema and upickle.default.ReadWriter" in { + // given schema and reader / writer in scope + given givenSchemaForCc: Schema[FlatClass] = Schema.derived[FlatClass] + given rw: _root_.upickle.default.ReadWriter[FlatClass] = _root_.upickle.default.macroRW[FlatClass] + + // when + val derived = Pickler.derived[FlatClass] + val obj = derived.toCodec.decode("""{"fieldA": 654, "fieldB": "field_b_value"}""") + + // then + obj shouldBe Value(FlatClass(654, "field_b_value")) + } + + it should "work with `derives`" in { + // when + val bookPickler: Pickler[Book] = summon[Pickler[Book]] + val bookShelfPickler: Pickler[BookShelf] = summon[Pickler[BookShelf]] + + // then + bookPickler.toCodec.encode(Book("John", "Hello")) shouldBe """{"author":"John","title":"Hello"}""" + bookShelfPickler.toCodec.encode(BookShelf(List(Book("Alice", "Goodbye")))) shouldBe + """{"books":[{"author":"Alice","title":"Goodbye"}]}""" + } + + it should "build an instance for a flat case class" in { + // when + val derived = Pickler.derived[FlatClass] + val jsonStr = derived.toCodec.encode(FlatClass(44, "b_value")) + + // then + jsonStr shouldBe """{"fieldA":44,"fieldB":"b_value"}""" + } + + it should "build an instance for a case class with a nested case 
class" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val derived = Pickler.derived[TopClass] + val jsonStr = derived.toCodec.encode(TopClass("field_a_value", InnerClass(7954))) + val inputJson = """{"fieldA":"field_a_value_2","fieldB":{"fieldA11":-321}}""" + val resultObj = derived.toCodec.decode(inputJson) + + // then + jsonStr shouldBe """{"fieldA":"field_a_value","fieldB":{"fieldA11":7954}}""" + resultObj shouldBe Value(TopClass("field_a_value_2", InnerClass(-321))) + } + + it should "fail to derive a Pickler when there's a Schema but missing ReadWriter" in { + assertDoesNotCompile(""" + given givenSchemaForCc: Schema[FlatClass] = Schema.derived[FlatClass] + Pickler.derived[FlatClass] + """) + } + + it should "use encodedName from configuration" in { + // given + import generic.auto.* // for Pickler auto-derivation + given schemaConfig: Configuration = Configuration.default.withSnakeCaseMemberNames + + // when + val derived = Pickler.derived[TopClass] + val jsonStr = derived.toCodec.encode(TopClass("field_a_value", InnerClass(7954))) + + // then + jsonStr shouldBe """{"field_a":"field_a_value","field_b":{"field_a11":7954}}""" + } + + it should "use encodedName from annotations" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val derived = Pickler.derived[TopClass2] + val jsonStr = derived.toCodec.encode(TopClass2("field_a_value", AnnotatedInnerClass("f-a-value", "f-b-value"))) + + // then + jsonStr shouldBe """{"fieldA":"field_a_value","fieldB":{"encoded_field-a":"f-a-value","fieldB":"f-b-value"}}""" + } + + it should "Decode in a Reader using custom encodedName" in { + // given + import generic.auto.* // for Pickler auto-derivation + given schemaConfig: Configuration = Configuration.default.withSnakeCaseMemberNames + + // when + val derived = Pickler.derived[TopClass] + val jsonStr = """{"field_a":"field_a_value","field_b":{"field_a11":7954}}""" + val obj = derived.toCodec.decode(jsonStr) + + // then + obj shouldBe Value(TopClass("field_a_value", InnerClass(7954))) + } + + it should "derive picklers for Option fields" in { + import generic.auto.* // for Pickler auto-derivation + + // when + val pickler1 = Pickler.derived[FlatClassWithOption] + val pickler2 = Pickler.derived[NestedClassWithOption] + val jsonStr1 = pickler1.toCodec.encode(FlatClassWithOption("fieldA value", Some(-4018))) + val jsonStr2 = pickler2.toCodec.encode(NestedClassWithOption(Some(FlatClassWithOption("fieldA value2", Some(-3014))))) + val jsonStr3 = pickler1.toCodec.encode(FlatClassWithOption("fieldA value", None)) + + // then + { + given derivedFlatClassSchema: Schema[FlatClassWithOption] = Schema.derived[FlatClassWithOption] + pickler1.schema shouldBe derivedFlatClassSchema + pickler2.schema shouldBe Schema.derived[NestedClassWithOption] + jsonStr1 shouldBe """{"fieldA":"fieldA value","fieldB":-4018}""" + jsonStr2 shouldBe """{"innerField":{"fieldA":"fieldA value2","fieldB":-3014}}""" + jsonStr3 shouldBe """{"fieldA":"fieldA value","fieldB":null}""" + } + } + + it should "derive picklers for List fields" in { + import generic.auto.* // for Pickler auto-derivation + + // when + val pickler1 = Pickler.derived[FlatClassWithList] + val codec1 = pickler1.toCodec + val pickler2 = Pickler.derived[NestedClassWithList] + val codec2 = pickler2.toCodec + val obj1 = FlatClassWithList("fieldA value", List(64, -5)) + val obj2 = NestedClassWithList(List(FlatClassWithList("a2", Nil), FlatClassWithList("a3", List(8, 9)))) + val jsonStr1 = 
codec1.encode(obj1) + val jsonStr2 = codec2.encode(obj2) + + // then + jsonStr1 shouldBe """{"fieldA":"fieldA value","fieldB":[64,-5]}""" + codec1.decode(jsonStr1) shouldBe Value(obj1) + jsonStr2 shouldBe """{"innerField":[{"fieldA":"a2","fieldB":[]},{"fieldA":"a3","fieldB":[8,9]}]}""" + codec2.decode(jsonStr2) shouldBe Value(obj2) + { + import sttp.tapir.generic.auto.* + pickler2.schema shouldBe Schema.derived[NestedClassWithList] + } + } + + it should "derive picklers for Array fields" in { + import generic.auto.* // for Pickler auto-derivation + + // when + val pickler1 = Pickler.derived[FlatClassWithArray] + val codec1 = pickler1.toCodec + val pickler2 = Pickler.derived[NestedClassWithArray] + val codec2 = pickler2.toCodec + val obj1 = FlatClassWithArray("fieldA value 50", Array(8, 8, 107)) + val obj2 = NestedClassWithArray(Array(FlatClassWithArray("a2", Array()), FlatClassWithArray("a3", Array(-10)))) + val jsonStr1 = codec1.encode(obj1) + val jsonStr2 = codec2.encode(obj2) + + // then + jsonStr1 shouldBe """{"fieldA":"fieldA value 50","fieldB":[8,8,107]}""" + jsonStr2 shouldBe """{"innerField":[{"fieldA":"a2","fieldB":[]},{"fieldA":"a3","fieldB":[-10]}]}""" + { + import sttp.tapir.generic.auto.* + pickler2.schema shouldBe Schema.derived[NestedClassWithArray] + } + } + it should "derive picklers for Either fields" in { + import generic.auto.* // for Pickler auto-derivation + + // when + val pickler = Pickler.derived[ClassWithEither] + val codec = pickler.toCodec + val obj1 = ClassWithEither("fieldA 1", Left("err1")) + val obj2 = ClassWithEither("fieldA 2", Right(SimpleTestResult("it is fine"))) + val jsonStr1 = codec.encode(obj1) + val jsonStr2 = codec.encode(obj2) + + // then + jsonStr1 shouldBe """{"fieldA":"fieldA 1","fieldB":[0,"err1"]}""" + jsonStr2 shouldBe """{"fieldA":"fieldA 2","fieldB":[1,{"msg":"it is fine"}]}""" + { + import sttp.tapir.generic.auto.* + pickler.schema shouldBe Schema.derived[ClassWithEither] + } + } + + it should "derive picklers for Map with String key" in { + import generic.auto.* // for Pickler auto-derivation + + // when + val pickler = Pickler.derived[ClassWithMap] + val codec = pickler.toCodec + val obj = ClassWithMap(Map(("keyB", SimpleTestResult("result1")), ("keyA", SimpleTestResult("result2")))) + val jsonStr = codec.encode(obj) + + // then + jsonStr shouldBe """{"field":{"keyB":{"msg":"result1"},"keyA":{"msg":"result2"}}}""" + { + import sttp.tapir.generic.auto.* + pickler.schema shouldBe Schema.derived[ClassWithMap] + } + } + + it should "derive picklers for Map with non-String key" in { + import generic.auto.* // for Pickler auto-derivation + + // when + given picklerMap: Pickler[Map[UUID, SimpleTestResult]] = Pickler.picklerForMap(_.toString) + val pickler = Pickler.derived[ClassWithMapCustomKey] + val uuid1: UUID = UUID.randomUUID() + val uuid2: UUID = UUID.randomUUID() + val codec = pickler.toCodec + val obj = ClassWithMapCustomKey(Map((uuid1, SimpleTestResult("result3")), (uuid2, SimpleTestResult("result4")))) + val jsonStr = codec.encode(obj) + + // then + jsonStr shouldBe s"""{"field":{"$uuid1":{"msg":"result3"},"$uuid2":{"msg":"result4"}}}""" + { + import sttp.tapir.generic.auto.* + picklerMap.schema shouldBe Schema.schemaForMap[UUID, SimpleTestResult](_.toString) + given Schema[Map[UUID, SimpleTestResult]] = picklerMap.schema + pickler.schema shouldBe Schema.derived[ClassWithMapCustomKey] + } + } + it should "handle a simple ADT (no customizations)" in { + // given + import generic.auto.* // for Pickler auto-derivation + case class 
MyCaseClass(fieldA: ErrorCode, fieldB: String) + + // when + val derived = Pickler.derived[MyCaseClass] + val jsonStr1 = derived.toCodec.encode(MyCaseClass(ErrorTimeout, "msg18")) + val jsonStr2 = derived.toCodec.encode(MyCaseClass(CustomError("customErrMsg"), "msg18")) + + // then + jsonStr1 shouldBe """{"fieldA":{"$type":"sttp.tapir.json.pickler.Fixtures.ErrorTimeout"},"fieldB":"msg18"}""" + jsonStr2 shouldBe """{"fieldA":{"$type":"sttp.tapir.json.pickler.Fixtures.CustomError","msg":"customErrMsg"},"fieldB":"msg18"}""" + } + + it should "apply custom field name encoding to a simple ADT" in { + // given + import generic.auto.* // for Pickler auto-derivation + given schemaConfig: Configuration = Configuration.default.copy(toEncodedName = _.toUpperCase()) + case class MyCaseClass(fieldA: ErrorCode, fieldB: String) + + // when + val derived = Pickler.derived[MyCaseClass] + val jsonStr1 = derived.toCodec.encode(MyCaseClass(ErrorTimeout, "msg18")) + val jsonStr2 = derived.toCodec.encode(MyCaseClass(CustomError("customErrMsg"), "msg18")) + + // then + jsonStr1 shouldBe """{"FIELDA":{"$type":"sttp.tapir.json.pickler.Fixtures.ErrorTimeout"},"FIELDB":"msg18"}""" + jsonStr2 shouldBe """{"FIELDA":{"$type":"sttp.tapir.json.pickler.Fixtures.CustomError","MSG":"customErrMsg"},"FIELDB":"msg18"}""" + } + + it should "apply defaults from annotations" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val codecCc1 = Pickler.derived[ClassWithDefault].toCodec + val codecCc2 = Pickler.derived[ClassWithDefault2].toCodec + val codecCc3 = Pickler.derived[ClassWithDefault3].toCodec + val jsonStrCc11 = codecCc1.encode(ClassWithDefault("field-a-user-value", "msg104")) + val object12 = codecCc1.decode("""{"fieldB":"msg105"}""") + val object2 = codecCc2.decode("""{"fieldA":"msgCc12"}""") + val object3 = + codecCc3.decode( + """{"fieldA":{"$type":"sttp.tapir.json.pickler.Fixtures.ErrorNotFound"}, "fieldC": {"fieldInner": "deeper field inner"}}""" + ) + + // then + jsonStrCc11 shouldBe """{"fieldA":"field-a-user-value","fieldB":"msg104"}""" + object12 shouldBe Value(ClassWithDefault("field-a-default", "msg105")) + object2 shouldBe Value(ClassWithDefault2("msgCc12", ErrorTimeout)) + object3 shouldBe Value(ClassWithDefault3(ErrorNotFound, InnerCaseClass("def-field", 65), InnerCaseClass("deeper field inner", 4))) + } + + it should "apply defaults from class fields, then annotations" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val codecCc1 = Pickler.derived[ClassWithScalaDefault].toCodec + val codecCc2 = Pickler.derived[ClassWithScalaAndTapirDefault].toCodec + val jsonStrCc11 = codecCc1.encode(ClassWithScalaDefault("field-a-user-value", "msg104")) + val jsonStrCc12 = codecCc1.encode(ClassWithScalaDefault("field-a-default", "text b")) + val object12 = codecCc1.decode("""{"fieldB":"msg205"}""") + val object2 = codecCc2.decode("""{"fieldB":"msgCc22"}""") + + // then + jsonStrCc11 shouldBe """{"fieldA":"field-a-user-value","fieldB":"msg104"}""" + jsonStrCc12 shouldBe """{"fieldA":"field-a-default","fieldB":"text b"}""" + object12 shouldBe Value(ClassWithScalaDefault("field-a-default", "msg205")) + object2 shouldBe Value(ClassWithScalaAndTapirDefault("field-a-tapir-default", "msgCc22", 55)) + } + + it should "apply custom discriminator name to a simple ADT" in { + // given + import generic.auto.* // for Pickler auto-derivation + given schemaConfig: Configuration = Configuration.default.withDiscriminator("kind") + case class MyCaseClass(fieldA: ErrorCode, 
fieldB: String)
+    val inputObj1 = MyCaseClass(CustomError("customErrMsg2"), "msg19")
+    val inputObj2 = MyCaseClass(ErrorNotFound, "")
+
+    // when
+    val derived = Pickler.derived[MyCaseClass]
+    val codec = derived.toCodec
+    val jsonStr1 = codec.encode(inputObj1)
+    val jsonStr2 = codec.encode(inputObj2)
+
+    // then
+    jsonStr1 shouldBe """{"fieldA":{"kind":"sttp.tapir.json.pickler.Fixtures.CustomError","msg":"customErrMsg2"},"fieldB":"msg19"}"""
+    jsonStr2 shouldBe """{"fieldA":{"kind":"sttp.tapir.json.pickler.Fixtures.ErrorNotFound"},"fieldB":""}"""
+    codec.decode(jsonStr1) shouldBe Value(inputObj1)
+    codec.decode(jsonStr2) shouldBe Value(inputObj2)
+  }
+
+  it should "Set discriminator value using class name" in {
+    // given
+    import generic.auto.* // for Pickler auto-derivation
+
+    // when
+    val picklerResponse = Pickler.derived[StatusResponse]
+    val inputObject = StatusResponse(StatusBadRequest(55))
+    val codec = picklerResponse.toCodec
+    val jsonStr = codec.encode(inputObject)
+    val decoded = codec.decode(jsonStr)
+
+    // then
+    jsonStr shouldBe """{"status":{"$type":"sttp.tapir.json.pickler.Fixtures.StatusBadRequest","bF":55}}"""
+    decoded shouldBe Value(inputObject)
+  }
+
+  it should "Set discriminator value using oneOfUsingField with a String extractor" in {
+    // given
+    sealed trait Entity {
+      def kind: String
+    }
+    case class Person(firstName: String, lastName: String) extends Entity {
+      def kind: String = "person"
+    }
+    case class Organization(name: String) extends Entity {
+      def kind: String = "org"
+    }
+
+    val pPerson = Pickler.derived[Person]
+    val pOrganization = Pickler.derived[Organization]
+
+    // when
+    given pEntity: Pickler[Entity] =
+      Pickler.oneOfUsingField[Entity, String](_.kind, _.toString)("person" -> pPerson, "org" -> pOrganization)
+
+    // then
+    pEntity.toCodec.encode(Person("Jessica", "West")) shouldBe
+      """{"$type":"person","firstName":"Jessica","lastName":"West"}"""
+  }
+
+  it should "Set discriminator value using oneOfUsingField" in {
+    // given
+    val picklerOk = Pickler.derived[StatusOk]
+    val picklerBadRequest = Pickler.derived[StatusBadRequest]
+    val picklerInternalError = Pickler.derived[StatusInternalError.type]
+
+    // when
+    given statusPickler: Pickler[Status] = Pickler.oneOfUsingField[Status, Int](_.code, codeInt => s"code-$codeInt")(
+      200 -> picklerOk,
+      400 -> picklerBadRequest,
+      500 -> picklerInternalError
+    )
+    val picklerResponse = Pickler.derived[StatusResponse]
+    val codec = picklerResponse.toCodec
+    val inputObject1 = StatusResponse(StatusBadRequest(54))
+    val jsonStr1 = codec.encode(inputObject1)
+    val decoded1 = codec.decode(jsonStr1)
+    val inputObject2 = StatusResponse(StatusInternalError)
+    val jsonStr2 = codec.encode(inputObject2)
+    val decoded2 = codec.decode(jsonStr2)
+
+    // then
+    jsonStr1 shouldBe """{"status":{"$type":"code-400","bF":54}}"""
+    decoded1 shouldBe Value(inputObject1)
+    jsonStr2 shouldBe """{"status":{"$type":"code-500"}}"""
+    decoded2 shouldBe Value(inputObject2)
+  }
+
+  it should "Set discriminator value with oneOfUsingField for a deeper hierarchy" in {
+    // given
+    sealed trait Status:
+      def code: Int
+
+    sealed trait DeeperStatus extends Status
+    sealed trait DeeperStatus2 extends Status
+
+    case class StatusOk(oF: Int) extends DeeperStatus {
+      def code = 200
+    }
+    case class StatusBadRequest(bF: Int) extends DeeperStatus2 {
+      def code = 400
+    }
+
+    case class Response(status: Status)
+    val picklerOk = Pickler.derived[StatusOk]
+    val picklerBadRequest = Pickler.derived[StatusBadRequest]
+
+    // when
+    given statusPickler: Pickler[Status] = Pickler.oneOfUsingField[Status,
Int](_.code, codeInt => s"code-$codeInt")( + 200 -> picklerOk, + 400 -> picklerBadRequest + ) + val picklerResponse = Pickler.derived[Response] + val inputObject = Response(StatusOk(818)) + val codec = picklerResponse.toCodec + val encoded = codec.encode(inputObject) + val decoded = codec.decode(encoded) + + // then + encoded shouldBe """{"status":{"$type":"code-200","oF":818}}""" + decoded shouldBe Value(inputObject) + } + + it should "support simple enums" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val picklerResponse = Pickler.derived[Response] + val codec = picklerResponse.toCodec + val inputObj = Response(ColorEnum.Pink, "pink!!") + val encoded = codec.encode(inputObj) + + // then + encoded shouldBe """{"color":"Pink","description":"pink!!"}""" + codec.decode(encoded) shouldBe Value(inputObj) + } + + it should "support enums with fields" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val picklerResponse = Pickler.derived[RichColorResponse] + val codec = picklerResponse.toCodec + val inputObj = RichColorResponse(RichColorEnum.Cyan) + val encoded = codec.encode(inputObj) + + // then + encoded shouldBe """{"color":"Cyan"}""" + codec.decode(encoded) shouldBe Value(inputObj) + } + + it should "support sealed hierarchies looking like enums" in { + // given + import generic.auto.* // for Pickler auto-derivation + + // when + val picklerResponse = Pickler.derived[Entity] + val codec = picklerResponse.toCodec + val inputObj = Entity.Business("221B Baker Street") + val encoded = codec.encode(inputObj) + + // then + encoded shouldBe """{"$type":"Business","address":"221B Baker Street"}""" + codec.decode(encoded) shouldBe Value(inputObj) + } + + it should "handle enums with ordinal encoding" in { + // given + given Pickler[ColorEnum] = Pickler + .derivedEnumeration[ColorEnum] + .customStringBased(_.ordinal.toString) + + // when + val picklerResponse = Pickler.derived[Response] + val codec = picklerResponse.toCodec + val inputObj = Response(ColorEnum.Pink, "pink!!") + val encoded = codec.encode(inputObj) + + // then + encoded shouldBe """{"color":"1","description":"pink!!"}""" + codec.decode(encoded) shouldBe Value(inputObj) + } + + it should "handle enums with custom function encoding" in { + // given + given picklerColorEnum: Pickler[RichColorEnum] = + Pickler.derivedEnumeration[RichColorEnum].customStringBased(enumValue => s"color-number-${enumValue.code}") + + // when + val picklerResponse = Pickler.derived[RichColorResponse] + val codec = picklerResponse.toCodec + val inputObj = RichColorResponse(RichColorEnum.Cyan) + val encoded = codec.encode(inputObj) + + // then + encoded shouldBe """{"color":"color-number-3"}""" + codec.decode(encoded) shouldBe Value(inputObj) + } + + it should "handle value classes" in { + // when + val pickler = Pickler.derived[ClassWithValues] + val codec = pickler.toCodec + val inputObj = ClassWithValues(UserId(UUID.fromString("550e8400-e29b-41d4-a716-446655440000")), UserName("Alan"), age = 65) + val encoded = codec.encode(inputObj) + + // then + encoded shouldBe """{"id":"550e8400-e29b-41d4-a716-446655440000","name":"Alan","age":65}""" + codec.decode(encoded) shouldBe Value(inputObj) + } + + it should "Reject oneOfUsingField for enums" in { + // given + assertCompiles(""" + import Fixtures.* + val picklerCyan = Pickler.derived[RichColorEnum.Cyan.type] + val picklerMagenta = Pickler.derived[RichColorEnum.Magenta.type]""") + // when + assertDoesNotCompile(""" + import Fixtures.* + val 
picklerCyan = Pickler.derived[RichColorEnum.Cyan.type] + val picklerMagenta = Pickler.derived[RichColorEnum.Magenta.type] + + given picklerRichColor: Pickler[RichColorEnum] = + Pickler.oneOfUsingField[RichColorEnum, Int](_.code, codeInt => s"code-$codeInt")( + 3 -> picklerCyan, + 18 -> picklerMagenta + )""") + } +} diff --git a/json/pickler/src/test/scala/sttp/tapir/json/pickler/SchemaDerivationTest.scala b/json/pickler/src/test/scala/sttp/tapir/json/pickler/SchemaDerivationTest.scala new file mode 100644 index 0000000000..0346f5154d --- /dev/null +++ b/json/pickler/src/test/scala/sttp/tapir/json/pickler/SchemaDerivationTest.scala @@ -0,0 +1,517 @@ +package sttp.tapir.json.pickler + +import org.scalatest.Assertions +import org.scalatest.flatspec.AsyncFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.tapir.Schema.annotations._ +import sttp.tapir.Schema.{SName, schemaForBoolean} +import sttp.tapir.SchemaMacroTestData.{Cat, Dog, Hamster, Pet} +import sttp.tapir.SchemaType._ +import sttp.tapir.TestUtil.field +import sttp.tapir.{AttributeKey, FieldName, Schema, SchemaType, Validator} + +import java.math.{BigDecimal => JBigDecimal, BigInteger => JBigInteger} +import sttp.tapir.generic.Configuration + +class SchemaGenericAutoTest extends AsyncFlatSpec with Matchers { + import SchemaGenericAutoTest._ + + import generic.auto._ + def implicitlySchema[T: Pickler]: Schema[T] = summon[Pickler[T]].schema + + "Schema auto derivation" should "find schema for simple types" in { + stringSchema.schemaType shouldBe SString() + stringSchema.isOptional shouldBe false + + implicitlySchema[Short].schemaType shouldBe SInteger() + intSchema.schemaType shouldBe SInteger() + longSchema.schemaType shouldBe SInteger() + implicitlySchema[Float].schemaType shouldBe SNumber() + implicitlySchema[Double].schemaType shouldBe SNumber() + implicitlySchema[Boolean].schemaType shouldBe SBoolean() + implicitlySchema[BigDecimal].schemaType shouldBe SNumber() + implicitlySchema[JBigDecimal].schemaType shouldBe SNumber() + implicitlySchema[JBigInteger].schemaType shouldBe SInteger() + } + + it should "find schema for optional types" in { + implicitlySchema[Option[String]].schemaType shouldBe SOption[Option[String], String](Schema(SString()))(identity) + implicitlySchema[Option[String]].isOptional shouldBe true + } + + it should "find schema for collections" in { + implicitlySchema[Array[String]].schemaType shouldBe SArray[Array[String], String](stringSchema)(_.toIterable) + implicitlySchema[Array[String]].isOptional shouldBe true + + implicitlySchema[List[String]].schemaType shouldBe SArray[List[String], String](stringSchema)(_.toIterable) + implicitlySchema[List[String]].isOptional shouldBe true + + implicitlySchema[Set[String]].schemaType shouldBe SArray[Set[String], String](stringSchema)(_.toIterable) + implicitlySchema[Set[String]].isOptional shouldBe true + } + + val expectedASchema: Schema[A] = + Schema[A]( + SProduct( + List(field(FieldName("f1"), stringSchema), field(FieldName("f2"), intSchema), field(FieldName("f3"), stringSchema.asOption)) + ), + Some(SName("sttp.tapir.json.pickler.A")) + ) + + case class ListA(fl: List[A]) + + it should "find schema for collections of case classes" in { + implicitlySchema[ListA].schemaType shouldBe SProduct( + List(SProductField(FieldName("fl"), Schema(SArray[List[A], A](expectedASchema)(_.toIterable), isOptional = true), _ => None)) + ) + } + + it should "find schema for a simple case class" in { + implicitlySchema[A] shouldBe expectedASchema + 
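+    // f3 is an Option, so it is not mandatory; only f1 and f2 appear in `required`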
implicitlySchema[A].schemaType.asInstanceOf[SProduct[A]].required shouldBe List(FieldName("f1"), FieldName("f2")) + } + + it should "find schema for a simple case class and use identity naming transformation" in { + implicitlySchema[D].schemaType shouldBe expectedDSchema + } + + it should "find schema for a nested case class" in { + implicitlySchema[B].name shouldBe Some(SName("sttp.tapir.json.pickler.B")) + implicitlySchema[B].schemaType shouldBe SProduct[B]( + List(field(FieldName("g1"), stringSchema), field(FieldName("g2"), expectedASchema)) + ) + } + + it should "find schema for case classes with collections" in { + implicitlySchema[C].name shouldBe Some(SName("sttp.tapir.json.pickler.C")) + implicitlySchema[C].schemaType shouldBe SProduct[C]( + List(field(FieldName("h1"), stringSchema.asArray), field(FieldName("h2"), intSchema.asOption)) + ) + implicitlySchema[C].schemaType.asInstanceOf[SProduct[C]].required shouldBe Nil + } + + // it should "use custom schema for custom types" in { // TODO + // implicit val scustom: Schema[Custom] = Schema[Custom](SchemaType.SString()) + // val schema = Pickler.derived[G].schema + // schema.name shouldBe Some(SName("sttp.tapir.json.pickler.G")) + // schema.schemaType shouldBe SProduct[G]( + // List(field(FieldName("f1"), intSchema), field(FieldName("f2"), stringSchema)) + // ) + // } + + it should "derive schema for parametrised type classes" in { + val schema = implicitlySchema[H[A]] + schema.name shouldBe Some(SName("sttp.tapir.json.pickler.H", List("A"))) + schema.schemaType shouldBe SProduct[H[A]](List(field(FieldName("data"), expectedASchema))) + } + + it should "find schema for map" in { + val schema = implicitlySchema[Map[String, Int]] + schema.name shouldBe Some(SName("Map", List("Int"))) + schema.schemaType shouldBe SOpenProduct[Map[String, Int], Int](Nil, intSchema)(identity) + } + + it should "find schema for map of products" in { + val schema = implicitlySchema[Map[String, D]] + schema.name shouldBe Some(SName("Map", List("D"))) + schema.schemaType shouldBe SOpenProduct[Map[String, D], D]( + Nil, + Schema(SProduct(List(field(FieldName("someFieldName"), stringSchema))), Some(SName("sttp.tapir.json.pickler.D"))) + )(identity) + } + + it should "find schema for map of generic products" in { + val schema = implicitlySchema[Map[String, H[D]]] + schema.name shouldBe Some(SName("Map", List("H", "D"))) + schema.schemaType shouldBe SOpenProduct[Map[String, H[D]], H[D]]( + Nil, + Schema( + SProduct[H[D]]( + List( + field( + FieldName("data"), + Schema(SProduct[D](List(field(FieldName("someFieldName"), stringSchema))), Some(SName("sttp.tapir.json.pickler.D"))) + ) + ) + ), + Some(SName("sttp.tapir.json.pickler.H", List("D"))) + ) + )(identity) + } + + ignore should "add meta-data to schema from annotations" in { // TODO https://github.com/softwaremill/tapir/issues/3167 + val schema = implicitlySchema[I] + schema shouldBe Schema[I]( + SProduct( + List( + field( + FieldName("int"), + intSchema.description("some int field").format("int32").default(1234).encodedExample(1234).validate(Validator.max(100)) + ), + field(FieldName("noDesc"), longSchema), + field( + FieldName("bool", "alternativeBooleanName"), + implicitlySchema[Option[Boolean]].description("another optional boolean flag") + ), + field( + FieldName("child", "child-k-name"), + Schema[K]( + SProduct( + List( + field(FieldName("double"), implicitlySchema[Double].format("double64")), + field(FieldName("str"), stringSchema.format("special-string")) + ) + ), + 
Some(SName("sttp.tapir.json.pickler.K")) + ).deprecated(true).description("child-k-desc") + ) + ) + ), + Some(SName("sttp.tapir.json.pickler.I")) + ).description( + "class I" + ) // TODO this causes test to fail, because SchemaDerivation doesn't support @description annotation on case classes + } + + it should "find the right schema for a case class with simple types" in { + // given + case class Test1( + f1: String, + f2: Byte, + f3: Short, + f4: Int, + f5: Long, + f6: Float, + f7: Double, + f8: Boolean, + f9: BigDecimal, + f10: JBigDecimal, + f11: JBigInteger + ) + val schema = implicitlySchema[Test1] + + // when + schema.name shouldBe Some(SName("sttp.tapir.json.pickler.SchemaGenericAutoTest..Test1")) + schema.schemaType shouldBe SProduct[Test1]( + List( + field(FieldName("f1"), implicitlySchema[String]), + field(FieldName("f2"), implicitlySchema[Byte]), + field(FieldName("f3"), implicitlySchema[Short]), + field(FieldName("f4"), implicitlySchema[Int]), + field(FieldName("f5"), implicitlySchema[Long]), + field(FieldName("f6"), implicitlySchema[Float]), + field(FieldName("f7"), implicitlySchema[Double]), + field(FieldName("f8"), implicitlySchema[Boolean]), + field(FieldName("f9"), implicitlySchema[BigDecimal]), + field(FieldName("f10"), implicitlySchema[JBigDecimal]), + field(FieldName("f11"), implicitlySchema[JBigInteger]) + ) + ) + } + + it should "find schema for a simple case class and use snake case naming transformation" in { + val expectedSnakeCaseNaming = + expectedDSchema.copy(fields = List(field[D, String](FieldName("someFieldName", "some_field_name"), stringSchema))) + implicit val customConf: Configuration = Configuration.default.withSnakeCaseMemberNames + implicitlySchema[D].schemaType shouldBe expectedSnakeCaseNaming + } + + it should "find schema for a simple case class and use kebab case naming transformation" in { + val expectedKebabCaseNaming = + expectedDSchema.copy(fields = List(field[D, String](FieldName("someFieldName", "some-field-name"), stringSchema))) + implicit val customConf: Configuration = Configuration.default.withKebabCaseMemberNames + implicitlySchema[D].schemaType shouldBe expectedKebabCaseNaming + } + + it should "not transform names which are annotated with a custom name" in { + implicit val customConf: Configuration = Configuration.default.withSnakeCaseMemberNames + val schema = implicitlySchema[L] + schema shouldBe Schema[L]( + SProduct( + List( + field(FieldName("firstField", "specialName"), intSchema), + field(FieldName("secondField", "second_field"), intSchema) + ) + ), + Some(SName("sttp.tapir.json.pickler.L")) + ) + } + + ignore should "customise the schema using the given function" in { // TODO https://github.com/softwaremill/tapir/issues/3166 + val schema = implicitlySchema[M] + schema.attribute(M.testAttributeKey) shouldBe Some("test") + } + + it should "generate one-of schema using the given discriminator" in { + implicit val customConf: Configuration = Configuration.default.withDiscriminator("who_am_i") + val schemaType = implicitlySchema[Entity].schemaType + schemaType shouldBe a[SCoproduct[Entity]] + + schemaType.asInstanceOf[SCoproduct[Entity]].subtypes should contain theSameElementsAs List( + Schema( + SProduct[Organization]( + List(field(FieldName("name"), Schema(SString())), field(FieldName("who_am_i"), Schema(SString()))) + ), + Some(SName("sttp.tapir.json.pickler.Organization")) + ), + Schema( + SProduct[Person]( + List( + field(FieldName("first"), Schema(SString())), + field(FieldName("age"), Schema(SInteger(), format = 
Some("int32"))), + field(FieldName("who_am_i"), Schema(SString())) + ) + ), + Some(SName("sttp.tapir.json.pickler.Person")) + ), + Schema( + SProduct[UnknownEntity.type]( + List( + field(FieldName("who_am_i"), Schema(SString())) + ) + ), + Some(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + + schemaType.asInstanceOf[SCoproduct[Entity]].discriminator shouldBe Some( + SDiscriminator( + FieldName("who_am_i"), + Map( + "Organization" -> SRef(SName("sttp.tapir.json.pickler.Organization")), + "Person" -> SRef(SName("sttp.tapir.json.pickler.Person")), + "UnknownEntity" -> SRef(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + ) + } + + it should "generate one-of schema using the given discriminator (kebab case subtype names)" in { + implicit val customConf: Configuration = Configuration.default.withDiscriminator("who_am_i").withKebabCaseDiscriminatorValues + implicitlySchema[Entity].schemaType.asInstanceOf[SCoproduct[Entity]].discriminator shouldBe Some( + SDiscriminator( + FieldName("who_am_i"), + Map( + "organization" -> SRef(SName("sttp.tapir.json.pickler.Organization")), + "person" -> SRef(SName("sttp.tapir.json.pickler.Person")), + "unknown-entity" -> SRef(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + ) + } + + it should "generate one-of schema using the given discriminator (snake case subtype names)" in { + implicit val customConf: Configuration = Configuration.default.withDiscriminator("who_am_i").withSnakeCaseDiscriminatorValues + implicitlySchema[Entity].schemaType.asInstanceOf[SCoproduct[Entity]].discriminator shouldBe Some( + SDiscriminator( + FieldName("who_am_i"), + Map( + "organization" -> SRef(SName("sttp.tapir.json.pickler.Organization")), + "person" -> SRef(SName("sttp.tapir.json.pickler.Person")), + "unknown_entity" -> SRef(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + ) + } + + it should "generate one-of schema using the given discriminator (full subtype names)" in { + implicit val customConf: Configuration = Configuration.default.withDiscriminator("who_am_i").withFullDiscriminatorValues + implicitlySchema[Entity].schemaType.asInstanceOf[SCoproduct[Entity]].discriminator shouldBe Some( + SDiscriminator( + FieldName("who_am_i"), + Map( + "sttp.tapir.json.pickler.Organization" -> SRef(SName("sttp.tapir.json.pickler.Organization")), + "sttp.tapir.json.pickler.Person" -> SRef(SName("sttp.tapir.json.pickler.Person")), + "sttp.tapir.json.pickler.UnknownEntity" -> SRef(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + ) + } + + it should "generate one-of schema using the given discriminator (full kebab case subtype names)" in { + implicit val customConf: Configuration = Configuration.default.withDiscriminator("who_am_i").withFullKebabCaseDiscriminatorValues + implicitlySchema[Entity].schemaType.asInstanceOf[SCoproduct[Entity]].discriminator shouldBe Some( + SDiscriminator( + FieldName("who_am_i"), + Map( + "sttp.tapir.json.pickler.organization" -> SRef(SName("sttp.tapir.json.pickler.Organization")), + "sttp.tapir.json.pickler.person" -> SRef(SName("sttp.tapir.json.pickler.Person")), + "sttp.tapir.json.pickler.unknown-entity" -> SRef(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + ) + } + + it should "generate one-of schema using the given discriminator (full snake case subtype names)" in { + implicit val customConf: Configuration = Configuration.default.withDiscriminator("who_am_i").withFullSnakeCaseDiscriminatorValues + implicitlySchema[Entity].schemaType.asInstanceOf[SCoproduct[Entity]].discriminator shouldBe Some( + 
SDiscriminator( + FieldName("who_am_i"), + Map( + "sttp.tapir.json.pickler.organization" -> SRef(SName("sttp.tapir.json.pickler.Organization")), + "sttp.tapir.json.pickler.person" -> SRef(SName("sttp.tapir.json.pickler.Person")), + "sttp.tapir.json.pickler.unknown_entity" -> SRef(SName("sttp.tapir.json.pickler.UnknownEntity")) + ) + ) + ) + } + + it should "find schema for subtypes containing parent metadata from annotations" in { + val schemaType = implicitlySchema[Pet].schemaType + + val expectedCatSchema = Schema( + SProduct[Cat]( + List( + field(FieldName("name"), stringSchema.copy(description = Some("cat name"))), + field(FieldName("catFood"), stringSchema.copy(description = Some("cat food"))) + ) + ), + Some(SName("sttp.tapir.SchemaMacroTestData.Cat")) + ) + + val expectedDogSchema = Schema( + SProduct[Dog]( + List( + field(FieldName("name"), stringSchema.copy(description = Some("name"))), + field(FieldName("dogFood"), stringSchema.copy(description = Some("dog food"))) + ) + ), + Some(SName("sttp.tapir.SchemaMacroTestData.Dog")) + ) + + val expectedHamsterSchema = Schema( + SProduct[Hamster]( + List( + field(FieldName("name"), stringSchema.copy(description = Some("name"))), + field(FieldName("likesNuts"), booleanSchema.copy(description = Some("likes nuts?"))) + ) + ), + Some(SName("sttp.tapir.SchemaMacroTestData.Hamster")) + ) + + val subtypes = schemaType.asInstanceOf[SCoproduct[Pet]].subtypes + + List(expectedCatSchema, expectedDogSchema, expectedHamsterSchema) + .foldLeft(Assertions.succeed)((_, schema) => subtypes.contains(schema) shouldBe true) + } + + it should "add validators for collection and option elements" in { + case class ValidateEachTest( + @validateEach(Validator.min(5)) + ints: List[Int], + @validateEach[String](Validator.minLength(3)) + maybeString: Option[String] + ) + + val schema = implicitlySchema[ValidateEachTest] + schema.applyValidation(ValidateEachTest(Nil, None)) should have size 0 + schema.applyValidation(ValidateEachTest(List(6, 10), Some("1234"))) should have size 0 + schema.applyValidation(ValidateEachTest(List(6, 0, 10), Some("1234"))) should have size 1 + schema.applyValidation(ValidateEachTest(List(6, 10), Some("12"))) should have size 1 + } +} + +object SchemaGenericAutoTest { + import generic.auto._ + def implicitlySchema[A: Pickler]: Schema[A] = summon[Pickler[A]].schema + + private[json] val stringSchema = implicitlySchema[String] + private[json] val intSchema = implicitlySchema[Int] + private[json] val longSchema = implicitlySchema[Long] + private[json] val booleanSchema = implicitlySchema[Boolean] + + val expectedDSchema: SProduct[D] = + SProduct[D](List(field(FieldName("someFieldName"), stringSchema))) + + // comparing recursive schemas without validators + private[json] def removeValidators[T](s: Schema[T]): Schema[T] = (s.schemaType match { + case SProduct(fields) => s.copy(schemaType = SProduct(convertToSProductField(fields))) + case st @ SCoproduct(subtypes, discriminator) => + s.copy(schemaType = + SCoproduct( + subtypes.map(subtypeSchema => removeValidators(subtypeSchema)), + discriminator + )(st.subtypeSchema) + ) + case st @ SOpenProduct(fields, valueSchema) => + s.copy(schemaType = + SOpenProduct( + fields = convertToSProductField(fields), + valueSchema = removeValidators(valueSchema) + )(st.mapFieldValues) + ) + case st @ SArray(element) => s.copy(schemaType = SArray(removeValidators(element))(st.toIterable)) + case st @ SOption(element) => s.copy(schemaType = SOption(removeValidators(element))(st.toOption)) + case _ => s + 
}).copy(validator = Validator.pass) + + private def convertToSProductField[T](fields: List[SProductField[T]]) = { + fields.map(f => SProductField[T, f.FieldType](f.name, removeValidators(f.schema), f.get)) + } +} + +case class StringValueClass(value: String) extends AnyVal +case class IntegerValueClass(value: Int) extends AnyVal + +case class A(f1: String, f2: Int, f3: Option[String]) +case class B(g1: String, g2: A) +case class C(h1: List[String], h2: Option[Int]) +case class D(someFieldName: String) +case class F(f1: List[F], f2: Int) + +class Custom(c: String) +case class G(f1: Int, f2: Custom) + +case class H[T](data: T) + +@description("class I") +case class I( + @description("some int field") + @default(1234) + @encodedExample(1234) + @format("int32") + @validate[Int](Validator.max(100)) + int: Int, + noDesc: Long, + @description("another optional boolean flag") + @encodedName("alternativeBooleanName") + bool: Option[Boolean], + @deprecated + @description("child-k-desc") + @encodedName("child-k-name") + child: K +) + +case class K( + @format("double64") + double: Double, + @format("special-string") + str: String +) + +case class L( + @encodedName("specialName") + firstField: Int, + secondField: Int +) + +@customise(s => s.attribute(M.testAttributeKey, "test")) +case class M(field: Int) +object M { + val testAttributeKey: AttributeKey[String] = AttributeKey[String] +} + +sealed trait Node +case class Edge(id: Long, source: Node) extends Node +case class SimpleNode(id: Long) extends Node + +case class IOpt(i1: Option[IOpt], i2: Int) +case class JOpt(data: Option[IOpt]) + +case class IList(i1: List[IList], i2: Int) +case class JList(data: List[IList]) + +sealed trait Entity +case class Person(first: String, age: Int) extends Entity +case class Organization(name: String) extends Entity +case object UnknownEntity extends Entity