From b90599bae1437c214b0510ee104566efeea0ac08 Mon Sep 17 00:00:00 2001 From: kciesielski Date: Tue, 7 May 2024 10:33:02 +0200 Subject: [PATCH] Rework perf tests --- build.sbt | 6 +- perf-tests/README.md | 114 ++++------------ .../sttp/tapir/perf/apis/ServerRunner.scala | 4 +- .../sttp/tapir/perf/apis/TypeScanner.scala | 38 ++++++ .../sttp/tapir/perf/CsvReportPrinter.scala | 32 ----- .../sttp/tapir/perf/GatlingLogProcessor.scala | 93 ------------- .../scala/sttp/tapir/perf/GatlingRunner.scala | 28 ---- .../sttp/tapir/perf/HtmlReportPrinter.scala | 45 ------- .../sttp/tapir/perf/PerfTestSuiteParams.scala | 101 -------------- .../sttp/tapir/perf/PerfTestSuiteResult.scala | 14 -- .../sttp/tapir/perf/PerfTestSuiteRunner.scala | 89 ------------- .../scala/sttp/tapir/perf/Simulations.scala | 124 +++++++++--------- .../scala/sttp/tapir/perf/TypeScanner.scala | 64 --------- 13 files changed, 128 insertions(+), 624 deletions(-) create mode 100644 perf-tests/src/main/scala/sttp/tapir/perf/apis/TypeScanner.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/CsvReportPrinter.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/GatlingLogProcessor.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/GatlingRunner.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/HtmlReportPrinter.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteParams.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteResult.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteRunner.scala delete mode 100644 perf-tests/src/test/scala/sttp/tapir/perf/TypeScanner.scala diff --git a/build.sbt b/build.sbt index 5dceb41841..3474002aa7 100644 --- a/build.sbt +++ b/build.sbt @@ -507,13 +507,11 @@ lazy val perfTests: ProjectMatrix = (projectMatrix in file("perf-tests")) "io.gatling.highcharts" % "gatling-charts-highcharts" % "3.11.12" % "test" exclude ( "com.fasterxml.jackson.core", "jackson-databind" ), - "io.gatling" % "gatling-test-framework" % "3.11.2" % "test" exclude ("com.fasterxml.jackson.core", "jackson-databind"), + "io.gatling" % "gatling-test-framework" % "3.11.12" % "test" exclude ("com.fasterxml.jackson.core", "jackson-databind"), "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.17.1", "nl.grons" %% "metrics4-scala" % Versions.metrics4Scala % Test, "com.lihaoyi" %% "scalatags" % Versions.scalaTags % Test, - // Needs to match version used by Gatling - "com.github.scopt" %% "scopt" % "3.7.1", - "io.github.classgraph" % "classgraph" % "4.8.172" % Test, + "io.github.classgraph" % "classgraph" % "4.8.172", "org.http4s" %% "http4s-core" % Versions.http4s, "org.http4s" %% "http4s-dsl" % Versions.http4s, "org.http4s" %% "http4s-blaze-server" % Versions.http4sBlazeServer, diff --git a/perf-tests/README.md b/perf-tests/README.md index 68c77db26c..33d658d26c 100644 --- a/perf-tests/README.md +++ b/perf-tests/README.md @@ -2,41 +2,38 @@ To work with performance tests, make sure you are running JDK 21+, and that the `ALSO_LOOM` environment variable is set, because the `perf-tests` project includes `tapir-nima`, which requires the Loom JDK feature to be available. -Performance tests are executed by running `PerfTestSuiteRunner`, which is a standard "Main" Scala application, configured by command line parameters. It executes a sequence of tests, where -each test consist of: - -1. Starting a HTTP server if specified (Like Tapir-based Pekko, Vartx, http4s, or a "vanilla", tapirless one) 2. 
Running a simulation in warm-up mode (5 seconds, 3 concurrent users)
-3. Running a simulation with user-defined duration and concurrent user count
-4. Closing the server
-5. Reading Gatling's simulation.log and building simulation results
-
-The sequence is repeated for a set of servers multiplied by simulations. Afterwards, all individual simulation results will be aggregated into a single report.
-If no test servers are specified, only simulations are run, assuming a server started externally.
-Command parameters can be viewed by running:
+Performance tests are executed by running `perfTests/Gatling/testOnly sttp.tapir.perf.SimulationClassName`, assuming that the server under test is available on `localhost:8080`.
+## Starting the server
+To run a test server, use a separate sbt session and start it using `ServerRunner`:
+```
+perfTests/runMain sttp.tapir.perf.apis.ServerRunner http4s.TapirMulti
+```
+Run it without a server name to see a list of all available names.
+Exception: If you're testing `NettySyncServer` (tapir-server-netty-sync), its server runner is located elsewhere:
```
-perfTests/Test/runMain sttp.tapir.perf.PerfTestSuiteRunner
+nettyServerSync3/Test/runMain sttp.tapir.netty.sync.perf.NettySyncServerRunner
```
+This is caused by `perf-tests` using Scala 2.13 (forced by Gatling), while `NettySyncServer` is written exclusively for Scala 3.
+
+## Configuring and running simulations
-which displays help similar to:
+Simulations can be found in `sttp.tapir.perf.Simulations.scala`. To run one, use Gatling/testOnly:
```
-[error] Usage: perf [options]
-[error] -s, --server Comma-separated list of short server names, or groups like 'netty.*', 'pekko.*', etc. Available servers: http4s.TapirInterceptorMulti, http4s.TapirMulti, http4s.Tapir, http4s.VanillaMulti, http4s.Vanilla, netty.cats.TapirInterceptorMulti, netty.cats.TapirMulti, netty.cats.Tapir, netty.future.TapirInterceptorMulti, netty.future.TapirMulti, netty.future.Tapir, pekko.TapirInterceptorMulti, pekko.TapirMulti, pekko.Tapir, pekko.VanillaMulti, pekko.Vanilla, play.TapirInterceptorMulti, play.TapirMulti, play.Tapir, play.VanillaMulti, play.Vanilla, vertx.TapirInterceptorMulti, vertx.TapirMulti, vertx.Tapir, vertx.VanillaMulti, vertx.Vanilla, vertx.cats.TapirInterceptorMulti, vertx.cats.TapirMulti, vertx.cats.Tapir
-[error] -m, --sim Comma-separated list of short simulation names, or '*' for all. Available simulations: PostBytes, PostFile, PostLongBytes, PostLongString, PostString, SimpleGetMultiRoute, SimpleGet
-[error] -u, --users Number of concurrent users, default is 1
-[error] -d, --duration Single simulation duration in seconds, default is 10
-[error] -g, --gatling-reports Generate Gatling reports for individuals sims, may significantly affect total time (disabled by default)
+perfTests/Gatling/testOnly sttp.tapir.perf.SimpleGetSimulation
```

-Generating Gatling reports is useful if you want to verify additional data like latency or throughput distribution over time.
-If you want to run a test server separately from simulations, use a separate sbt session and start it using `ServerRunner`:
```
perfTests/runMain sttp.tapir.perf.apis.ServerRunner http4s.TapirMulti
```

-This is useful when profiling, as `perfTests/runMain` will be a forked JVM isolated from the JVM that runs Gatling.
+The simulation will first run in warmup mode, then it will run with the specified user count and duration. To set these values, use the system properties
+`tapir.perf.user-count` and `tapir.perf.duration-seconds`.
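For reference, the lookup logic and the defaults live in `CommonSimulations` (condensed from `Simulations.scala` in this patch):
```
// Any "tapir.perf.*" system property is optional; a default applies when it's absent.
def getParamOpt(paramName: String): Option[String] = Option(System.getProperty(s"tapir.perf.${paramName}"))

def userCount = getParamOpt("user-count").map(_.toInt).getOrElse(DefaultUserCount) // DefaultUserCount = 30
def duration(warmup: Boolean) =
  if (warmup) WarmupDurationSeconds // the warmup phase always runs for 10 seconds
  else getParamOpt("duration-seconds").map(_.toInt).getOrElse(DefaultDurationSeconds) // DefaultDurationSeconds = 30
```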
These values can be passed to the sbt console on startup:
```
sbt -Dtapir.perf.user-count=100 -Dtapir.perf.duration-seconds=60
```
or within an already running interactive sbt session:
```
set ThisBuild/javaOptions += "-Dtapir.perf.user-count=100"
```
If not set, default values will be used (see `sttp.tapir.perf.CommonSimulations`).

## Profiling

@@ -56,34 +53,9 @@ After opening the flamegraph in your browser, use the spyglass icon to search fo

Note that profiling noticeably affects performance, so it's recommended to measure throughput/latency without the profiler attached.

-## Examples
-
-1. Run all sims on all pekko-http servers with other options set to default:
-```
-perfTests/Test/runMain sttp.tapir.perf.PerfTestSuiteRunner -s pekko.* -m *
-```
-
-2. Run all sims on http4s servers, with each simulation running for 5 seconds:
-```
-perfTests/Test/runMain sttp.tapir.perf.PerfTestSuiteRunner -s http4s.Tapir,http4s.TapirMulti,http4s.Vanilla,http4s.VanillaMulti -m * -d 5
-```
+## Latency reports

-3. Run some simulations on some servers, with 3 concurrent users instead of default 1, each simulation running for 15 seconds,
-and enabled Gatling report generation:
-```
-perfTests/Test/runMain sttp.tapir.perf.PerfTestSuiteRunner -s http4s.Tapir,netty.future.Tapir,play.Tapir -m PostLongBytes,PostFile -d 15 -u 3 -g
-```
-
-4. Run a netty-cats server with profiling, and then PostBytes and PostLongBytes simulation in a separate sbt session, for 25 seconds:
-```
-perfTests/runMain sttp.tapir.perf.apis.ServerRunner netty.cats.TapirMulti
-// in a separate sbt session:
-perfTest/Test/runMain sttp.tapir.perf.PerfTestSuiteRunner -m PostBytes,PostLongBytes -d 25
-```
-
-## Reports
-
-Each single simulation results in a latency HDR Histogram report printed to stdout as well as a file:
+In addition to the standard Gatling reports, each simulation produces a latency HDR Histogram report, printed to stdout as well as to a file:
```
[info] ******* Histogram saved to /home/kc/code/oss/tapir/.sbt/matrix/perfTests/SimpleGetSimulation-2024-02-26_10_30_22
```

You can use [HDR Histogram Plotter](https://hdrhistogram.github.io/HdrHistogram/plotFiles.html) to plot a set of such files.

-The main report is generated after all tests, and contains results for standard Gatling latencies and mean throughput in a table combining
-all servers and tests. They will be printed to a HTML and a CSV file after the suite finishes:
-```
-[info] ******* Test Suite report saved to /home/alice/projects/tapir/.sbt/matrix/perfTests/tapir-perf-tests-2024-01-22_16_33_14.csv
-[info] ******* Test Suite report saved to /home/alice/projects/tapir/.sbt/matrix/perfTests/tapir-perf-tests-2024-01-22_16_33_14.html
-```
-
-These reports include information about throughput and latency of each server for each simulation.
-
-How the aggregation works: After each non-warmup test the results are read from `simulation.log` produced by Gatling and aggregated by `GatlingLogProcessor`.
-The processor then uses 'com.codehale.metrics.Histogram' to calculate
-p99, p95, p75, and p50 percentiles for latencies of all requests sent during the simulation.
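Note that in the reworked setup no `simulation.log` post-processing takes place: each scenario records per-request latencies straight into an HdrHistogram. The relevant hooks, condensed from `Simulations.scala` in this patch:
```
// A Gatling check saves the response time into the user's session...
def sessionSaveResponseTime = responseTimeInMillis.saveAs(responseTimeKey)

// ...and a session hook moves it into the shared HdrHistogram. During warmup the
// histogram is reset instead, so that warmup samples don't skew the final report.
def handleLatencyHistogram(histogram: Histogram, warmup: Boolean): Expression[Session] = { session =>
  if (warmup) histogram.reset()
  else histogram.recordValue(session(responseTimeKey).as[Int].toLong)
  session
}
```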
- 

## Adding new servers and simulations

To add a new server, go to `src/main/scala` and put an object extending `sttp.tapir.perf.apis.ServerRunner` in a subpackage of `sttp.tapir.perf`.
-It should be automatically resoled by the `TypeScanner` utility used by the `PerfTestSuiteRunner`.
+It should be automatically resolved by the `TypeScanner` utility used by the `ServerRunner`.
Similarly with simulations: go to `src/test/scala` and add a class extending `sttp.tapir.perf.PerfTestSuiteRunnerSimulation` under `sttp.tapir.perf`. See the `Simulations.scala` file for examples.

-## Testing WebSockets
-
-`WebSocketsSimulation` cannot be executed using `PerfTestSuiteRunner`, as it requires special warmup and injection setup, it also won't store gatling log in a format expected by our report builder.
-For WebSockets we want to measure latency distribution, not throughput, so use given instructions to run it and read the report:
-
-1. Adjust simulation parameters in the `sttp.tapir.perf.WebSocketsSimulation` class
-2. Start a server using `ServerRunner`, for example:
-```
-perfTests/runMain sttp.tapir.perf.apis.ServerRunner http4s.Tapir
-```
-If you're testing `NettySyncServer` (tapir-server-netty-sync), its server runner is located elsewhere:
-```
-nettyServerSync3/Test/runMain sttp.tapir.netty.sync.perf.NettySyncServerRunner
-```
-This is caused by `perf-tests` using Scala 2.13 forced by Gatling, while `NettySyncServer` is written excluisively for Scala 3.
-
-3. Run the simulation using Gatling's task:
-```
-perfTests/Gatling/testOnly sttp.tapir.perf.WebSocketsSimulation
-```
-4. A HdrHistogram report will be printed to stdout and to a file. Check output for the full path.
-5. Stop the server manually.
-6. Use [HDR Histogram Plotter](https://hdrhistogram.github.io/HdrHistogram/plotFiles.html) to plot histogram file(s)
diff --git a/perf-tests/src/main/scala/sttp/tapir/perf/apis/ServerRunner.scala b/perf-tests/src/main/scala/sttp/tapir/perf/apis/ServerRunner.scala
index 679aacccc2..1a34cddf9e 100644
--- a/perf-tests/src/main/scala/sttp/tapir/perf/apis/ServerRunner.scala
+++ b/perf-tests/src/main/scala/sttp/tapir/perf/apis/ServerRunner.scala
@@ -19,7 +19,9 @@ object ServerRunner extends IOApp {
   private val runtimeMirror = universe.runtimeMirror(getClass.getClassLoader)
 
   def run(args: List[String]): IO[ExitCode] = {
-    val shortServerName = args.head
+    val shortServerName = args.headOption.getOrElse {
+      throw new IllegalArgumentException(s"Unspecified server name. 
Use one of: ${TypeScanner.allServers}") + } for { killSwitch <- startServerByTypeName(ServerName.fromShort(shortServerName)) _ <- IO.never.guarantee(killSwitch) diff --git a/perf-tests/src/main/scala/sttp/tapir/perf/apis/TypeScanner.scala b/perf-tests/src/main/scala/sttp/tapir/perf/apis/TypeScanner.scala new file mode 100644 index 0000000000..9c6d86fe91 --- /dev/null +++ b/perf-tests/src/main/scala/sttp/tapir/perf/apis/TypeScanner.scala @@ -0,0 +1,38 @@ +package sttp.tapir.perf.apis + +import io.github.classgraph.ClassGraph + +import scala.jdk.CollectionConverters._ +import scala.reflect.ClassTag +import scala.util.{Failure, Success, Try} + +import sttp.tapir.perf.Common._ + +/** Uses the classgraph library to quickly find all possible server runners (objects extending ServerRunner) + */ +object TypeScanner { + def findAllImplementations[T: ClassTag](rootPackage: String): List[Class[_]] = { + val superClass = scala.reflect.classTag[T].runtimeClass + + val scanResult = new ClassGraph() + .enableClassInfo() + .acceptPackages(rootPackage) + .scan() + + try { + val classes = + if (superClass.isInterface) + scanResult.getClassesImplementing(superClass.getName) + else + scanResult.getSubclasses(superClass.getName) + classes.loadClasses().asScala.toList + } finally { + scanResult.close() + } + } + + lazy val allServers: List[String] = + findAllImplementations[ServerRunner](rootPackage) + .map(_.getName) + .map(c => c.stripPrefix(s"${rootPackage}.").stripSuffix("Server$")) +} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/CsvReportPrinter.scala b/perf-tests/src/test/scala/sttp/tapir/perf/CsvReportPrinter.scala deleted file mode 100644 index f95551cdaf..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/CsvReportPrinter.scala +++ /dev/null @@ -1,32 +0,0 @@ -package sttp.tapir.perf - -object CsvReportPrinter { - def print(results: List[GatlingSimulationResult], initialSimOrdering: List[String]): String = { - - val groupedResults = results.groupBy(_.simulationName) - val orderedResults = initialSimOrdering.flatMap(simName => groupedResults.get(simName).map(simName -> _)) - val headers = "Simulation" :: orderedResults.head._2.flatMap(r => { - val server = r.serverName - List( - s"$server reqs/s", - s"$server latency-p99", - s"$server latency-p95", - s"$server latency-p75", - s"$server latency-p50" - ) - }) - val rows: List[String] = orderedResults.map { case (simName, serverResults) => - (simName :: serverResults.flatMap(singleResult => - List( - singleResult.meanReqsPerSec.toString, - singleResult.latencyP99.toString, - singleResult.latencyP95.toString, - singleResult.latencyP75.toString, - singleResult.latencyP75.toString - ) - )).mkString(",") - - } - (headers.mkString(",") :: rows).mkString("\n") - } -} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/GatlingLogProcessor.scala b/perf-tests/src/test/scala/sttp/tapir/perf/GatlingLogProcessor.scala deleted file mode 100644 index e43468ed4f..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/GatlingLogProcessor.scala +++ /dev/null @@ -1,93 +0,0 @@ -package sttp.tapir.perf - -import cats.effect.IO -import cats.syntax.all._ -import com.codahale.metrics.{Histogram, MetricRegistry} -import fs2.io.file.{Files => Fs2Files} -import fs2.text -import sttp.tapir.perf.apis.ServerName - -import java.nio.file.{Files, Path, Paths} -import java.util.stream.Collectors -import scala.concurrent.duration._ -import scala.jdk.CollectionConverters._ -import scala.util.{Failure, Try} - -/** Reads all entries from Gatling simulation.log file and 
calculates mean throughput as well as p99, p95, p75 and p50 latencies. - */ -object GatlingLogProcessor { - - val LogFileName = "simulation.log" - - /** Searches for the last modified simulation.log in all simulation logs and calculates results. - */ - def processLast(simulationName: String, serverName: ServerName): IO[GatlingSimulationResult] = { - for { - lastLogPath <- IO.fromTry(findLastLogFile) - _ <- IO.println(s"Processing results from $lastLogPath") - result <- Fs2Files[IO] - .readAll(fs2.io.file.Path.fromNioPath(lastLogPath)) - .through(text.utf8.decode) - .through(text.lines) - .fold[State](State.initial) { (state, line) => - val parts = line.split("\\s+") - if (parts.length >= 5 && parts(0) == "REQUEST") { - val requestStartTime = parts(4).toLong - val minRequestTs = state.minRequestTs.min(requestStartTime) - val requestEndTime = parts(5).toLong - val maxResponseTs = state.maxResponseTs.max(requestEndTime) - val reqDuration = requestEndTime - requestStartTime - state.histogram.update(reqDuration) - State(state.histogram, minRequestTs, maxResponseTs) - } else state - } - .compile - .lastOrError - .ensure(new IllegalStateException(s"Could not read results from $lastLogPath"))(_.totalDurationMs != State.initial.totalDurationMs) - .map { state => - val snapshot = state.histogram.getSnapshot - val throughput = (state.histogram.getCount().toDouble / state.totalDurationMs) * 1000 - GatlingSimulationResult( - simulationName, - serverName.shortName, - state.totalDurationMs.millis, - meanReqsPerSec = throughput.toLong, - latencyP99 = snapshot.get99thPercentile, - latencyP95 = snapshot.get95thPercentile, - latencyP75 = snapshot.get75thPercentile, - latencyP50 = snapshot.getMedian - ) - } - } yield result - } - - private def findLastLogFile: Try[Path] = { - val baseDir = System.getProperty("user.dir") - println(s"Base dir = $baseDir") - val resultsDir: Path = Paths.get(baseDir).resolve("results") - Try { - findAllSimulationLogs(resultsDir).maxBy(p => Files.getLastModifiedTime(p)) - }.recoverWith { case err => - Failure(new IllegalStateException(s"Could not resolve last ${LogFileName} in ${resultsDir}", err)) - } - } - - private def findAllSimulationLogs(basePath: Path): List[Path] = { - Try { - Files - .walk(basePath) - .filter(path => Files.isRegularFile(path) && path.getFileName.toString == LogFileName) - .collect(Collectors.toList[Path]) - .asScala - .toList - }.getOrElse(List.empty[Path]) - } - - case class State(histogram: Histogram, minRequestTs: Long, maxResponseTs: Long) { - def totalDurationMs: Long = maxResponseTs - minRequestTs - } - - object State { - def initial: State = State(new MetricRegistry().histogram("tapir"), Long.MaxValue, 1L) - } -} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/GatlingRunner.scala b/perf-tests/src/test/scala/sttp/tapir/perf/GatlingRunner.scala deleted file mode 100644 index 345870b711..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/GatlingRunner.scala +++ /dev/null @@ -1,28 +0,0 @@ -package sttp.tapir.perf - -import io.gatling.app.Gatling -import io.gatling.shared.cli.GatlingCliOptions - -object GatlingRunner { - - /** Blocking, runs the entire Gatling simulation. 
- */ - def runSimulationBlocking(simulationClassName: String, params: PerfTestSuiteParams): Unit = { - val reportsArr: Array[String] = - if (params.buildGatlingReports) Array.empty - else - Array( - s"--${GatlingCliOptions.NoReports.full}" - ) - val args = Array( - s"--${GatlingCliOptions.Simulation.full}", - s"$simulationClassName", - s"--${GatlingCliOptions.ResultsFolder.full}", - s"target/gatling/results", - ) ++ reportsArr - println(">>>>>>>>>>>>>>>>>>") - println(args.toList) - println(">>>>>>>>>>>>>>>>>>") - Gatling.main(args) - } -} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/HtmlReportPrinter.scala b/perf-tests/src/test/scala/sttp/tapir/perf/HtmlReportPrinter.scala deleted file mode 100644 index 81e1168196..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/HtmlReportPrinter.scala +++ /dev/null @@ -1,45 +0,0 @@ -package sttp.tapir.perf - -import scalatags.Text.all._ -import scalatags.Text - -object HtmlReportPrinter { - val tableStyle = "border-collapse: collapse; font-family: Roboto, Helvetica, Arial, sans-serif;" - val cellStyle = "border: 1px solid black; padding: 5px;" - val headStyle = - "border: 1px solid black; padding: 5px; color: rgb(245, 245, 245); background-color: rgb(85, 73, 75)" - val simCellStyle = - "border: 1px solid black; padding: 5px; color: black; background-color: rgb(243, 112, 94); font-weight: bold" - - def print(results: List[GatlingSimulationResult]): String = { - - val headers = "Server" :: results.groupBy(_.serverName).head._2.map(_.simulationName) - createHtmlTable(headers, results.groupBy(_.serverName).values.toList.sortBy(_.head.serverName)) - } - - private def createHtmlTable(headers: Seq[String], rows: List[List[GatlingSimulationResult]]): String = { - - table(style := tableStyle)( - thead( - tr(headers.map(header => th(header, style := headStyle))) - ), - tbody( - for (row <- rows) yield { - tr(td(row.head.serverName, style := simCellStyle) :: row.map(toColumn), style := cellStyle) - } - ) - ).render - } - - private def toColumn(result: GatlingSimulationResult): Text.TypedTag[String] = - td( - Seq( - p(s"reqs/sec = ${result.meanReqsPerSec}"), - p(s"p99 latency = ${result.latencyP99}"), - p(s"p95 latency = ${result.latencyP95}"), - p(s"p75 latency = ${result.latencyP75}"), - p(s"p50 latency = ${result.latencyP50}") - ), - style := cellStyle - ) -} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteParams.scala b/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteParams.scala deleted file mode 100644 index 7724fddb95..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteParams.scala +++ /dev/null @@ -1,101 +0,0 @@ -package sttp.tapir.perf - -import scopt.OptionParser -import sttp.tapir.perf.apis.{ExternalServerName, ServerName} - -import scala.concurrent.duration._ -import scala.util.{Failure, Success} - -import Common._ - -/** Parameters to customize a suite of performance tests. */ -case class PerfTestSuiteParams( - shortServerNames: List[String] = Nil, - shortSimulationNames: List[String] = Nil, - users: Int = PerfTestSuiteParams.defaultUserCount, - durationSeconds: Int = PerfTestSuiteParams.defaultDurationSeconds, - buildGatlingReports: Boolean = false -) { - - /** Handles server names passed as groups like netty.*, pekko.*, etc. by expanding them into lists of actual server names. Similarly, - * handles '*' as a short simulation name, expanding it to a list of all simulations. 
- * @return - */ - def adjustWildcards: PerfTestSuiteParams = { - val withAdjustedServer: PerfTestSuiteParams = { - val expandedShortServerNames = shortServerNames.flatMap { shortServerName => - if (shortServerName.contains("*")) { - TypeScanner.allServers.filter(_.startsWith(shortServerName.stripSuffix("*"))) - } else List(shortServerName) - } - copy(shortServerNames = expandedShortServerNames) - } - if (shortSimulationNames == List("*")) - withAdjustedServer.copy(shortSimulationNames = TypeScanner.allSimulations) - else - withAdjustedServer - } - - def duration: FiniteDuration = durationSeconds.seconds - - def totalTests: Int = shortServerNames.length * shortSimulationNames.length - - def minTotalDuration: FiniteDuration = ((duration + WarmupDuration) * totalTests.toLong).toMinutes.minutes - - /** Returns list of server names - */ - def serverNames: List[ServerName] = - if (shortServerNames.nonEmpty) shortServerNames.map(ServerName.fromShort).distinct else List(ExternalServerName) - - /** Returns pairs of (fullSimulationName, shortSimulationName), for example: (sttp.tapir.perf.SimpleGetSimulation, SimpleGet) - */ - def simulationNames: List[(String, String)] = - shortSimulationNames.map(s => s"${rootPackage}.${s}Simulation").zip(shortSimulationNames).distinct -} - -object PerfTestSuiteParams { - val defaultUserCount = 1 - val defaultDurationSeconds = 10 - val argParser = new OptionParser[PerfTestSuiteParams]("perf") { - opt[Seq[String]]('s', "server") - .action((x, c) => c.copy(shortServerNames = x.toList)) - .text( - s"Comma-separated list of short server names, or groups like 'netty.*', 'pekko.*'. If empty, only simulations will be run, assuming already running server. Available servers: ${TypeScanner.allServers - .mkString(", ")}" - ): Unit - - opt[Seq[String]]('m', "sim") - .required() - .action((x, c) => c.copy(shortSimulationNames = x.toList)) - .text( - s"Comma-separated list of short simulation names, or '*' for all. 
Available simulations: ${TypeScanner.allSimulations.mkString(", ")}" - ): Unit - - opt[Int]('u', "users") - .action((x, c) => c.copy(users = x)) - .text(s"Number of concurrent users, default is $defaultUserCount"): Unit - - opt[Int]('d', "duration") - .action((x, c) => c.copy(durationSeconds = x)) - .text(s"Single simulation duration in seconds, default is $defaultDurationSeconds"): Unit - - opt[Unit]('g', "gatling-reports") - .action((_, c) => c.copy(buildGatlingReports = true)) - .text("Generate Gatling reports for individuals sims, may significantly affect total time (disabled by default)"): Unit - } - - def parse(args: List[String]): PerfTestSuiteParams = { - argParser.parse(args, PerfTestSuiteParams()) match { - case Some(p) => - val params = p.adjustWildcards - TypeScanner.enusureExist(params.shortServerNames, params.shortSimulationNames) match { - case Success(_) => params - case Failure(ex) => - println(ex.getMessage) - sys.exit(-1) - } - case _ => - sys.exit(-1) - } - } -} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteResult.scala b/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteResult.scala deleted file mode 100644 index 9d2b0d032b..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteResult.scala +++ /dev/null @@ -1,14 +0,0 @@ -package sttp.tapir.perf - -import scala.concurrent.duration.FiniteDuration - -case class GatlingSimulationResult( - simulationName: String, - serverName: String, - duration: FiniteDuration, - meanReqsPerSec: Long, - latencyP99: Double, - latencyP95: Double, - latencyP75: Double, - latencyP50: Double -) diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteRunner.scala b/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteRunner.scala deleted file mode 100644 index 172c959eb7..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/PerfTestSuiteRunner.scala +++ /dev/null @@ -1,89 +0,0 @@ -package sttp.tapir.perf - -import cats.effect.{ExitCode, IO, IOApp} -import cats.syntax.all._ -import fs2.io.file -import fs2.text -import sttp.tapir.perf.Common._ -import sttp.tapir.perf.apis.ServerRunner - -import java.nio.file.Paths -import java.time.LocalDateTime -import java.time.format.DateTimeFormatter -import scala.concurrent.duration.FiniteDuration - -/** Main entry point for running suites of performance tests and generating aggregated reports. A suite represents a set of Gatling - * simulations executed on a set of servers, with some additional parameters like concurrent user count. One can run a single simulation on - * a single server, as well as a selection of (servers x simulations). The runner then collects Gatling logs from simulation.log files of - * individual simulation runs and puts them together into an aggregated report comparing results for all the runs. If no server are - * provided in the arguments, the suite will only execute simulations, assuming a server has been started separately. 
- */ -object PerfTestSuiteRunner extends IOApp { - - def run(args: List[String]): IO[ExitCode] = { - val params = PerfTestSuiteParams.parse(args) - println("===========================================================================================") - println(s"Running a suite of ${params.totalTests} tests, each for ${params.users} users and ${params.duration}.") - println(s"Additional warm-up phase of $WarmupDuration will be performed before each simulation.") - println(s"Servers: ${params.shortServerNames}") - println(s"Simulations: ${params.shortSimulationNames}") - println(s"Expected total duration: at least ${params.minTotalDuration}") - println("Generated suite report paths will be printed to stdout after all tests are finished.") - println("===========================================================================================") - - val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH_mm_ss") - val currentTime = LocalDateTime.now().format(formatter) - ((params.simulationNames, params.serverNames) - .mapN((x, y) => (x, y))) - .traverse { case ((simulationName, shortSimulationName), serverName) => - for { - serverKillSwitch <- ServerRunner.startServerByTypeName(serverName) - _ <- IO.println(s"Running server ${serverName.shortName}, simulation $simulationName") - _ <- (for { - _ <- IO.println("======================== WARM-UP ===============================================") - _ = setSimulationParams(users = WarmupUsers, duration = WarmupDuration, warmup = true) - _ <- IO.blocking(GatlingRunner.runSimulationBlocking(simulationName, params)) // warm-up - _ <- IO.println("==================== WARM-UP COMPLETED =========================================") - _ = setSimulationParams(users = params.users, duration = params.duration, warmup = false) - _ <- IO.blocking(GatlingRunner.runSimulationBlocking(simulationName, params)) // actual test - } yield ()) - .guarantee(serverKillSwitch) - serverSimulationResult <- GatlingLogProcessor.processLast(shortSimulationName, serverName) - _ <- IO.println(serverSimulationResult) - } yield (serverSimulationResult) - } - .flatTap(writeCsvReport(currentTime, params.simulationNames.map(_._2))) - .flatTap(writeHtmlReport(currentTime)) - .as(ExitCode.Success) - } - - /** Gatling doesn't allow to pass parameters to simulations when they are run using `Gatling.fromMap()`, that's why we're using system - * parameters as global variables to customize some params. 
- */ - private def setSimulationParams(users: Int, duration: FiniteDuration, warmup: Boolean): Unit = { - System.setProperty("tapir.perf.user-count", users.toString) - System.setProperty("tapir.perf.duration-seconds", duration.toSeconds.toString) - System.setProperty("tapir.perf.is-warm-up", warmup.toString): Unit - } - - private def writeCsvReport(currentTime: String, initialSimOrdering: List[String])(results: List[GatlingSimulationResult]): IO[Unit] = { - val csv = CsvReportPrinter.print(results, initialSimOrdering) - writeReportFile(csv, "csv", currentTime) - } - - private def writeHtmlReport(currentTime: String)(results: List[GatlingSimulationResult]): IO[Unit] = { - val html = HtmlReportPrinter.print(results) - writeReportFile(html, "html", currentTime) - } - - private def writeReportFile(report: String, extension: String, currentTime: String): IO[Unit] = { - val baseDir = System.getProperty("user.dir") - val targetFilePath = Paths.get(baseDir).resolve(s"tapir-perf-tests-${currentTime}.$extension") - fs2.Stream - .emit(report) - .through(text.utf8.encode) - .through(file.Files[IO].writeAll(fs2.io.file.Path.fromNioPath(targetFilePath))) - .compile - .drain >> IO.println(s"******* Test Suite report saved to $targetFilePath") - } -} diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/Simulations.scala b/perf-tests/src/test/scala/sttp/tapir/perf/Simulations.scala index 92f7757c92..769d25ffbc 100644 --- a/perf-tests/src/test/scala/sttp/tapir/perf/Simulations.scala +++ b/perf-tests/src/test/scala/sttp/tapir/perf/Simulations.scala @@ -12,6 +12,9 @@ import scala.util.Random object CommonSimulations { private val baseUrl = "127.0.0.1:8080" + val DefaultUserCount = 30 + val DefaultDurationSeconds = 30 + val WarmupDurationSeconds = 10 private val random = new Random() def randomByteArray(size: Int): Array[Byte] = { @@ -28,115 +31,101 @@ object CommonSimulations { def getParamOpt(paramName: String): Option[String] = Option(System.getProperty(s"tapir.perf.${paramName}")) - def getParam(paramName: String): String = - getParamOpt(paramName).getOrElse( - throw new IllegalArgumentException( - s"Missing tapir.perf.${paramName} system property, ensure you're running perf tests correctly (see perfTests/README.md)" - ) - ) + def userCount = getParamOpt("user-count").map(_.toInt).getOrElse(DefaultUserCount) + def duration(warmup: Boolean) = + if (warmup) WarmupDurationSeconds else getParamOpt("duration-seconds").map(_.toInt).getOrElse(DefaultDurationSeconds) + def namePrefix(warmup: Boolean) = if (warmup) "[WARMUP] " else "" - def userCount = getParam("user-count").toInt - def duration = getParam("duration-seconds").toInt - def namePrefix = if (getParamOpt("is-warm-up").map(_.toBoolean) == Some(true)) "[WARMUP] " else "" val responseTimeKey = "responseTime" def sessionSaveResponseTime = responseTimeInMillis.saveAs(responseTimeKey) - def recordResponseTime(histogram: Histogram): Expression[Session] = { session => - val responseTime = session("responseTime").as[Int] - histogram.recordValue(responseTime.toLong) + def handleLatencyHistogram(histogram: Histogram, warmup: Boolean): Expression[Session] = { session => + if (warmup) { + histogram.reset() + } else { + val responseTime = session("responseTime").as[Int] + histogram.recordValue(responseTime.toLong) + } session + } val httpProtocol = http.baseUrl(s"http://$baseUrl") val wsPubHttpProtocol = http.wsBaseUrl(s"ws://$baseUrl/ws") - def scenario_simple_get(routeNumber: Int, histogram: Histogram): PopulationBuilder = { + def scenario_simple_get(routeNumber: 
Int, histogram: Histogram, warmup: Boolean = false): PopulationBuilder = { val execHttpGet: ChainBuilder = exec( - http(s"HTTP GET /path$routeNumber/4") + http(s"${namePrefix(warmup)}HTTP GET /path$routeNumber/4") .get(s"/path$routeNumber/4") .check(sessionSaveResponseTime) ) - .exec(recordResponseTime(histogram)) + .exec(handleLatencyHistogram(histogram, warmup)) - scenario(s"${namePrefix}Repeatedly invoke GET of route number $routeNumber") - .during(duration)(execHttpGet) + scenario(s"${namePrefix(warmup)} Repeatedly invoke GET of route number $routeNumber") + .during(duration(warmup))(execHttpGet) .inject(atOnceUsers(userCount)) .protocols(httpProtocol) } - def scenario_post_string(routeNumber: Int, histogram: Histogram): PopulationBuilder = { + def scenario_post_string(routeNumber: Int, histogram: Histogram, warmup: Boolean = false): PopulationBuilder = { val execHttpPost = exec( - http(s"HTTP POST /path$routeNumber") + http(s"${namePrefix(warmup)}HTTP POST /path$routeNumber") .post(s"/path$routeNumber") .body(StringBody(_ => new String(randomAlphanumByteArray(256)))) .header("Content-Type", "text/plain") .check(sessionSaveResponseTime) ) - .exec(recordResponseTime(histogram)) + .exec(handleLatencyHistogram(histogram, warmup)) - scenario(s"${namePrefix}Repeatedly invoke POST with short string body") - .during(duration)(execHttpPost) + scenario(s"Repeatedly invoke POST with short string body") + .during(duration(warmup))(execHttpPost) .inject(atOnceUsers(userCount)) .protocols(httpProtocol) } - def scenario_post_bytes(routeNumber: Int, histogram: Histogram): PopulationBuilder = { + def scenario_post_bytes(routeNumber: Int, histogram: Histogram, warmup: Boolean = false): PopulationBuilder = { val execHttpPost = exec( - http(s"HTTP POST /pathBytes$routeNumber") + http(s"${namePrefix(warmup)}HTTP POST /pathBytes$routeNumber") .post(s"/pathBytes$routeNumber") .body(ByteArrayBody(_ => randomAlphanumByteArray(256))) .header("Content-Type", "text/plain") // otherwise Play complains .check(sessionSaveResponseTime) ) - .exec(recordResponseTime(histogram)) - - scenario(s"${namePrefix}Repeatedly invoke POST with short byte array body") - .during(duration)(execHttpPost) - .inject(atOnceUsers(userCount)) - .protocols(httpProtocol) - } - - def scenario_post_file(routeNumber: Int): PopulationBuilder = { - val execHttpPost = exec( - http(s"HTTP POST /pathFile$routeNumber") - .post(s"/pathFile$routeNumber") - .body(ByteArrayBody(constRandomLongBytes)) - .header("Content-Type", "application/octet-stream") - ) + .exec(handleLatencyHistogram(histogram, warmup)) - scenario(s"${namePrefix}Repeatedly invoke POST with file body") - .during(duration)(execHttpPost) + scenario(s"Repeatedly invoke POST with short byte array body") + .during(duration(warmup))(execHttpPost) .inject(atOnceUsers(userCount)) .protocols(httpProtocol) } - def scenario_post_long_bytes(routeNumber: Int, histogram: Histogram): PopulationBuilder = { + def scenario_post_long_bytes(routeNumber: Int, histogram: Histogram, warmup: Boolean = false): PopulationBuilder = { val execHttpPost = exec( - http(s"HTTP POST /pathBytes$routeNumber") + http(s"${namePrefix(warmup)}HTTP POST /pathBytes$routeNumber") .post(s"/pathBytes$routeNumber") .body(ByteArrayBody(constRandomLongAlphanumBytes)) .header("Content-Type", "text/plain") // otherwise Play complains .check(sessionSaveResponseTime) ) - .exec(recordResponseTime(histogram)) + .exec(handleLatencyHistogram(histogram, warmup)) - scenario(s"${namePrefix}Repeatedly invoke POST with large byte array") - 
.during(duration)(execHttpPost) + scenario(s"Repeatedly invoke POST with large byte array") + .during(duration(warmup))(execHttpPost) .inject(atOnceUsers(userCount)) .protocols(httpProtocol) } - def scenario_post_long_string(routeNumber: Int, histogram: Histogram): PopulationBuilder = { + def scenario_post_long_string(routeNumber: Int, histogram: Histogram, warmup: Boolean = false): PopulationBuilder = { val execHttpPost = exec( - http(s"HTTP POST /path$routeNumber") + http(s"${namePrefix(warmup)}HTTP POST /path$routeNumber") .post(s"/path$routeNumber") .body(ByteArrayBody(constRandomLongAlphanumBytes)) .header("Content-Type", "text/plain") .check(sessionSaveResponseTime) ) - .exec(recordResponseTime(histogram)) + .exec(handleLatencyHistogram(histogram, warmup)) - scenario(s"${namePrefix}Repeatedly invoke POST with large byte array, interpreted to a String") - .during(duration)(execHttpPost) + scenario(s"Repeatedly invoke POST with large byte array, interpreted to a String") + .during(duration(warmup))(execHttpPost) .inject(atOnceUsers(userCount)) .protocols(httpProtocol) } @@ -159,32 +148,39 @@ abstract class PerfTestSuiteRunnerSimulation extends Simulation { } class SimpleGetSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_simple_get(0, histogram)): Unit + val warmup = scenario_simple_get(0, histogram, warmup = true) + val measurements = scenario_simple_get(0, histogram) + setUp(warmup.andThen(measurements)): Unit } class SimpleGetMultiRouteSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_simple_get(127, histogram)): Unit + val warmup = scenario_simple_get(127, histogram, warmup = true) + val measurements = scenario_simple_get(127, histogram) + setUp(warmup.andThen(measurements)): Unit } class PostBytesSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_post_bytes(0, histogram)): Unit - + val warmup = scenario_post_bytes(0, histogram, warmup = true) + val measurements = scenario_post_bytes(0, histogram) + setUp(warmup.andThen(measurements)): Unit } class PostLongBytesSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_post_long_bytes(0, histogram)): Unit -} - -class PostFileSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_post_file(0)): Unit + val warmup = scenario_post_long_bytes(0, histogram, warmup = true) + val measurements = scenario_post_long_bytes(0, histogram) + setUp(warmup.andThen(measurements)): Unit } class PostStringSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_post_string(0, histogram)): Unit + val warmup = scenario_post_string(0, histogram, warmup = true) + val measurements = scenario_post_string(0, histogram) + setUp(warmup.andThen(measurements)): Unit } class PostLongStringSimulation extends PerfTestSuiteRunnerSimulation { - setUp(scenario_post_long_string(0, histogram)): Unit + val warmup = scenario_post_long_string(0, histogram, warmup = true) + val measurements = scenario_post_long_string(0, histogram) + setUp(warmup.andThen(measurements)): Unit } /** Based on https://github.com/kamilkloch/websocket-benchmark/ Can't be executed using PerfTestSuiteRunner, see perfTests/README.md @@ -228,7 +224,7 @@ class WebSocketsSimulation extends Simulation { session }) ) - .inject(rampUsers(scenarioUserCount).during(scenarioDuration)) + .inject(rampUsers(userCount).during(duration(warmup = true))) val measurement = scenario("WebSockets measurements") .exec( @@ -236,7 +232,7 @@ class WebSocketsSimulation extends Simulation { wsSubscribe("Subscribe", histogram), 
ws("Close WS").close ) - .inject(rampUsers(scenarioUserCount).during(scenarioDuration)) + .inject(rampUsers(userCount).during(duration(warmup = false))) setUp( warmup.andThen(measurement) diff --git a/perf-tests/src/test/scala/sttp/tapir/perf/TypeScanner.scala b/perf-tests/src/test/scala/sttp/tapir/perf/TypeScanner.scala deleted file mode 100644 index e6d8533904..0000000000 --- a/perf-tests/src/test/scala/sttp/tapir/perf/TypeScanner.scala +++ /dev/null @@ -1,64 +0,0 @@ -package sttp.tapir.perf - -import io.github.classgraph.ClassGraph -import sttp.tapir.perf.apis.ServerRunner - -import scala.jdk.CollectionConverters._ -import scala.reflect.ClassTag -import scala.util.{Failure, Success, Try} - -import Common._ - -/** Uses the classgraph library to quickly find all possible server runners (objects extending ServerRunner) or simulations (classes - * extending Simulation). - */ -object TypeScanner { - def findAllImplementations[T: ClassTag](rootPackage: String): List[Class[_]] = { - val superClass = scala.reflect.classTag[T].runtimeClass - - val scanResult = new ClassGraph() - .enableClassInfo() - .acceptPackages(rootPackage) - .scan() - - try { - val classes = - if (superClass.isInterface) - scanResult.getClassesImplementing(superClass.getName) - else - scanResult.getSubclasses(superClass.getName) - classes.loadClasses().asScala.toList - } finally { - scanResult.close() - } - } - - lazy val allServers: List[String] = - findAllImplementations[ServerRunner](rootPackage) - .map(_.getName) - .map(c => c.stripPrefix(s"${rootPackage}.").stripSuffix("Server$")) - - lazy val allSimulations: List[String] = - findAllImplementations[PerfTestSuiteRunnerSimulation](rootPackage) - .map(_.getName) - .map(c => c.stripPrefix(s"${rootPackage}.").stripSuffix("Simulation")) - - def enusureExist(serverShortNames: List[String], simShortNames: List[String]): Try[Unit] = { - val missingServers = serverShortNames.filterNot(allServers.contains) - val missingSims = simShortNames.filterNot(allSimulations.contains) - - if (missingServers.isEmpty && missingSims.isEmpty) { - Success(()) - } else { - val missingServersMessage = - if (missingServers.nonEmpty) - s"Unrecognized servers: ${missingServers.mkString(", ")}. Available servers: ${allServers.mkString(", ")}.\n" - else "" - val missingSimsMessage = - if (missingSims.nonEmpty) - s"Unrecognized simulations: ${missingSims.mkString(", ")}. Available simulations: ${allSimulations.mkString(", ")}" - else "" - Failure(new IllegalArgumentException(s"$missingServersMessage $missingSimsMessage".trim)) - } - } -}