From a2028005160c6262cb21be5a322168980a03f900 Mon Sep 17 00:00:00 2001
From: Travis Brown
Date: Tue, 16 Dec 2014 00:01:03 +0000
Subject: [PATCH] Enable Scala 2.11 SBT builds for util-eval, ostrich, and
 finagle-ostrich4

Problem

We want to publish 2.11 releases of Finagle subprojects that depend on
Ostrich, which we have not previously built for 2.11 (because it has specs
tests, and because it depends on scala-json and util-eval, neither of which
previously worked on 2.11).

Solution

util-eval previously didn't work on 2.11 because `scala.ScalaObject` was
removed after 2.10. This is fixed here.

The bulk of this commit is Rodrigo Lazoti's migration of Ostrich's tests
from specs to ScalaTest (I'm not using his pull request since the PR also
replaces scala-json with Jackson, which we don't want to do at this time to
avoid breaking public APIs).

This commit also reinstates util-eval and the Finagle subprojects that
depend on Ostrich in their projects' cross-builds.

Result

util-eval, ostrich, finagle-ostrich4, finagle-example, and finagle-stress
all build and pass tests on 2.11 (from SBT) for the first time.

RB_ID=534127
---
 .travis.yml | 18 +-
 project/Build.scala | 18 +-
 src/test/scala/BUILD | 4 +-
 .../ostrich/ConfiguredSpecification.scala | 27 --
 .../ostrich/admin/AdminHttpServiceSpec.scala | 402 ----
 .../ostrich/admin/AdminHttpServiceTest.scala | 434 ++++++
 .../admin/RuntimeEnvironmentSpec.scala | 56 ---
 .../admin/RuntimeEnvironmentTest.scala | 57 +++
 .../ostrich/admin/ServiceTrackerSpec.scala | 58 ---
 .../ostrich/admin/ServiceTrackerTest.scala | 61 +++
 .../admin/TimeSeriesCollectorSpec.scala | 150 ------
 .../admin/TimeSeriesCollectorTest.scala | 151 ++++++
 .../admin/config/AdminServiceConfigSpec.scala | 139 ------
 .../admin/config/AdminServiceConfigTest.scala | 159 +++++++
 .../ostrich/stats/DistributionSpec.scala | 45 --
 .../ostrich/stats/DistributionTest.scala | 59 +++
 .../stats/GraphiteStatsLoggerSpec.scala | 80 ----
 .../stats/GraphiteStatsLoggerTest.scala | 91 ++++
 .../twitter/ostrich/stats/HistogramSpec.scala | 235 ----------
 .../twitter/ostrich/stats/HistogramTest.scala | 309 +++++++++++++
 .../ostrich/stats/JsonStatsFetcherSpec.scala | 57 ---
 .../ostrich/stats/JsonStatsLoggerSpec.scala | 79 ----
 .../ostrich/stats/JsonStatsLoggerTest.scala | 89 ++++
 .../stats/LocalStatsCollectionSpec.scala | 38 --
 .../stats/LocalStatsCollectionTest.scala | 48 ++
 .../twitter/ostrich/stats/MetricSpec.scala | 51 --
 .../twitter/ostrich/stats/MetricTest.scala | 54 +++
 .../ostrich/stats/StatsCollectionSpec.scala | 242 ----------
 .../ostrich/stats/StatsCollectionTest.scala | 273 +++++++++++
 .../ostrich/stats/StatsListenerSpec.scala | 191 --------
 .../ostrich/stats/StatsListenerTest.scala | 196 ++++++++
 .../{StatsSpec.scala => StatsTest.scala} | 23 +-
 .../ostrich/stats/W3CStatsLoggerSpec.scala | 116 -----
 .../ostrich/stats/W3CStatsLoggerTest.scala | 132 ++++++
 .../twitter/ostrich/stats/W3CStatsSpec.scala | 129 ------
 .../twitter/ostrich/stats/W3CStatsTest.scala | 149 ++++++
 36 files changed, 2297 insertions(+), 2123 deletions(-)
 delete mode 100644 src/test/scala/com/twitter/ostrich/ConfiguredSpecification.scala
 delete mode 100644 src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceSpec.scala
 create mode 100644 src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceTest.scala
 delete mode 100644 src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentSpec.scala
 create mode 100644 src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentTest.scala
 delete mode 100644
src/test/scala/com/twitter/ostrich/admin/ServiceTrackerSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/admin/ServiceTrackerTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/DistributionSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/DistributionTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/HistogramSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/HistogramTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/JsonStatsFetcherSpec.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/MetricSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/MetricTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/StatsCollectionSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/StatsCollectionTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/StatsListenerSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/StatsListenerTest.scala rename src/test/scala/com/twitter/ostrich/stats/{StatsSpec.scala => StatsTest.scala} (60%) delete mode 100644 src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerTest.scala delete mode 100644 src/test/scala/com/twitter/ostrich/stats/W3CStatsSpec.scala create mode 100644 src/test/scala/com/twitter/ostrich/stats/W3CStatsTest.scala diff --git a/.travis.yml b/.travis.yml index bce90464..63be9706 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,18 @@ language: scala + scala: - - 2.9.2 -script: "./sbt test" + - 2.10.4 + - 2.11.4 + +jdk: + - oraclejdk7 + # Note: not currently testing on JDK 8 internally + - oraclejdk8 + - openjdk7 + +before_script: + # default $SBT_OPTS is irrelevant to sbt lancher + - unset SBT_OPTS + +script: + - ./sbt ++$TRAVIS_SCALA_VERSION test diff --git a/project/Build.scala b/project/Build.scala index fa16f6de..bc76cd66 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -10,7 +10,7 @@ object Ostrich extends Build { name := "ostrich", version := libVersion, organization := "com.twitter", - crossScalaVersions := Seq("2.10.4"), + crossScalaVersions := Seq("2.10.4", "2.11.4"), javacOptions ++= Seq("-source", "1.6", "-target", "1.6"), javacOptions in doc := Seq("-source", "1.6"), parallelExecution in Test := false, @@ -20,21 +20,13 @@ object Ostrich extends Build { "com.twitter" %% "util-eval" % utilVersion, "com.twitter" %% "util-logging" % utilVersion, "com.twitter" %% "util-jvm" % utilVersion, - "com.twitter" %% "scala-json" % "3.0.1" + 
"com.twitter" %% "scala-json" % "3.0.2" ), libraryDependencies ++= Seq( - "org.scala-tools.testing" %% "specs" % "1.6.9" % "test" cross CrossVersion.binaryMapped { - case "2.9.2" => "2.9.1" - case "2.10.0" => "2.10" - case x => x - }, - "junit" % "junit" % "4.8.1" % "test", - "cglib" % "cglib" % "2.1_3" % "test", - "asm" % "asm" % "1.5.3" % "test", - "org.objenesis" % "objenesis" % "1.1" % "test", - "org.hamcrest" % "hamcrest-all" % "1.1" % "test", - "org.jmock" % "jmock" % "2.4.0" % "test" + "junit" % "junit" % "4.10" % "test", + "org.mockito" % "mockito-all" % "1.9.5" % "test", + "org.scalatest" %% "scalatest" % "2.2.2" % "test" ), publishMavenStyle := true, publishTo <<= version { (v: String) => diff --git a/src/test/scala/BUILD b/src/test/scala/BUILD index ba8ee339..7aedb6ac 100644 --- a/src/test/scala/BUILD +++ b/src/test/scala/BUILD @@ -1,10 +1,8 @@ junit_tests(name='scala', dependencies=[ - '3rdparty:cglib', - '3rdparty:jmock', '3rdparty:junit', '3rdparty:mockito-all', - '3rdparty:specs', + '3rdparty:scalatest', 'ostrich', 'util/util-core', 'util/util-logging', diff --git a/src/test/scala/com/twitter/ostrich/ConfiguredSpecification.scala b/src/test/scala/com/twitter/ostrich/ConfiguredSpecification.scala deleted file mode 100644 index 911675cd..00000000 --- a/src/test/scala/com/twitter/ostrich/ConfiguredSpecification.scala +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright 2011 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.twitter.ostrich - -import com.twitter.logging.{Level, Logger} -import org.specs.SpecificationWithJUnit - -trait ConfiguredSpecification extends SpecificationWithJUnit { - noDetailedDiffs() - - Logger.reset() - Logger.get("").setLevel(Level.OFF) -} diff --git a/src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceSpec.scala b/src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceSpec.scala deleted file mode 100644 index 2e3610ae..00000000 --- a/src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceSpec.scala +++ /dev/null @@ -1,402 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich -package admin - -import com.twitter.conversions.time._ -import com.twitter.json.Json -import com.twitter.logging.{Level, Logger} -import java.net.{Socket, SocketException, URI, URL} -import org.specs.SpecificationWithJUnit -import org.specs.util.DataTables -import scala.collection.Map -import scala.collection.JavaConverters._ -import scala.io.Source -import stats.{Stats, StatsListener} - -class AdminHttpServiceSpec extends ConfiguredSpecification with DataTables { - def get(path: String): String = { - val port = service.address.getPort - val url = new URL("http://localhost:%s%s".format(port, path)) - Source.fromURL(url).getLines().mkString("\n") - } - - def getHeaders(path: String): Map[String, List[String]] = { - val port = service.address.getPort - val url = new URL("http://localhost:%s%s".format(port, path)) - url.openConnection().getHeaderFields.asScala.mapValues { _.asScala.toList } - } - - var service: AdminHttpService = null - - "AdminHttpService" should { - doBefore { - service = - new AdminHttpService( - 0, - 20, - Stats, - new RuntimeEnvironment(getClass), - 30.seconds, - { code => /* system-exit is a noop here */ } - ) - service.start() - } - - doAfter { - Stats.clearAll() - StatsListener.clearAll() - service.shutdown() - } - - "FolderResourceHandler" in { - val staticHandler = new FolderResourceHandler("/nested") - - "split a URI" in { - staticHandler.getRelativePath("/nested/1level.txt") mustEqual "1level.txt" - staticHandler.getRelativePath("/nested/2level/2level.txt") mustEqual "2level/2level.txt" - } - - "build paths correctly" in { - staticHandler.buildPath("1level.txt") mustEqual "/nested/1level.txt" - staticHandler.buildPath("2level/2level.txt") mustEqual "/nested/2level/2level.txt" - } - - "load resources" in { - staticHandler.loadResource("nested/1level.txt") must throwA[Exception] - staticHandler.loadResource("/nested/1level.txt") mustNot throwA[Exception] - } - } - - "static resources" in { - "drawgraph.js" in { - val inputStream = getClass.getResourceAsStream("/static/drawgraph.js") - inputStream mustNot beNull - Source.fromInputStream(inputStream).mkString mustNot beNull - } - - "unnested" in { - val inputStream = getClass.getResourceAsStream("/unnested.txt") - Source.fromInputStream(inputStream).mkString must beMatching("we are not nested") - } - - "1 level of nesting" in { - val inputStream = getClass.getResourceAsStream("/nested/1level.txt") - Source.fromInputStream(inputStream).mkString must beMatching("nested one level deep") - } - - "2 levels of nesting" in { - val inputStream = getClass.getResourceAsStream("/nested/2levels/2levels.txt") - Source.fromInputStream(inputStream).mkString must beMatching("nested two levels deep") - } - } - - "start and stop" in { - val port = service.address.getPort - new Socket("localhost", port) must notBeNull - service.shutdown() - new Socket("localhost", port) must throwA[SocketException] - } - - "answer pings" in { - val port = service.address.getPort - val socket = new Socket("localhost", port) - get("/ping.json").trim mustEqual """{"response":"pong"}""" - - service.shutdown() - new Socket("localhost", port) must eventually(throwA[SocketException]) - } - - "shutdown" in { - val port = service.address.getPort - get("/shutdown.json") - new Socket("localhost", port) must eventually(throwA[SocketException]) - } - - "quiesce" in { - val port = service.address.getPort - get("/quiesce.json") - new Socket("localhost", port) must eventually(throwA[SocketException]) - } - - "get a proper web page 
back for the report URL" in { - get("/report/") must beMatching("Stats Report") - } - - "return 404 for favicon" in { - get("/favicon.ico") must throwA[java.io.FileNotFoundException] - } - - "return 404 for a missing command" in { - get("/bullshit.json") must throwA[java.io.FileNotFoundException] - } - - "not crash when fetching /" in { - get("/") must beMatching("ostrich") - } - - "tell us its ostrich version in the headers" in { - getHeaders("/").get("X-ostrich-version") must beSome[List[String]] - } - - "server info" in { - val serverInfo = get("/server_info.json") - serverInfo mustMatch("\"build\":") - serverInfo mustMatch("\"build_revision\":") - serverInfo mustMatch("\"name\":") - serverInfo mustMatch("\"version\":") - serverInfo mustMatch("\"start_time\":") - serverInfo mustMatch("\"uptime\":") - } - - "change log levels" in { - // Add a logger with a very specific name - val name = "logger-" + System.currentTimeMillis - val logger = Logger.get(name) // register this logger - logger.setLevel(Level.INFO) - - // no levels specified - var logLevels = get("/logging") - logLevels mustMatch(name) - logLevels mustMatch("Specify a logger name and level") - - // specified properly - logLevels = get("/logging?name=%s&level=FATAL".format(name)) - Logger.get(name).getLevel() must be_==(Level.FATAL) - logLevels mustMatch("Successfully changed the level of the following logger") - - // made up level - logLevels = get("/logging?name=%s&level=OHEMGEE".format(name)) - logLevels mustMatch("Logging level change failed") - - // made up logger - logLevels = get("/logging?name=OHEMGEEWHYAREYOUUSINGTHISLOGGERNAME&level=INFO") - logLevels mustMatch("Logging level change failed") - } - - "fetch static files" in { - get("/static/drawgraph.js") must include("drawChart") - } - - "mesos health" in { - get("/health") must include("OK") - } - - "mesos abortabortabort" in { - val port = service.address.getPort - get("/abortabortabort") - new Socket("localhost", port) must eventually(throwA[SocketException]) - } - - "mesos quitquitquit" in { - val port = service.address.getPort - get("/quitquitquit") - new Socket("localhost", port) must eventually(throwA[SocketException]) - } - - "thread contention" in { - val prof = get("/contention.json") - prof mustMatch("\"blocked_threads\":") - } - - "provide stats" in { - doAfter { - service.shutdown() - } - - "in json" in { - // make some statsy things happen - Stats.clearAll() - Stats.time("kangaroo_time") { Stats.incr("kangaroos", 1) } - - val stats = Json.parse(get("/stats.json")).asInstanceOf[Map[String, Map[String, AnyRef]]] - stats("gauges") must haveKey("jvm_uptime") - stats("gauges") must haveKey("jvm_heap_used") - stats("counters") must haveKey("kangaroos") - stats("metrics") must haveKey("kangaroo_time_msec") - - val timing = stats("metrics")("kangaroo_time_msec").asInstanceOf[Map[String, Int]] - timing("count") mustEqual 1 - timing("minimum") must be_>=(0) - timing("maximum") must be_>=(timing("minimum")) - } - - "in json, with custom listeners" in { - Stats.clearAll() - Stats.incr("apples", 10) - Stats.addMetric("oranges", 5) - - var absStats = Json.parse(get("/stats.json")).asInstanceOf[Map[String, Map[String, AnyRef]]] - absStats("counters")("apples") mustEqual 10 - absStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 1 - var namespaceStats = Json.parse(get("/stats.json?namespace=monkey")) - .asInstanceOf[Map[String, Map[String, AnyRef]]] - namespaceStats("counters")("apples") mustEqual 10 - 
namespaceStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 1 - var periodicStats = Json.parse(get("/stats.json?period=30")) - .asInstanceOf[Map[String, Map[String, AnyRef]]] - periodicStats("counters")("apples") mustEqual 10 - periodicStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 1 - - Stats.incr("apples", 6) - Stats.addMetric("oranges", 3) - absStats = Json.parse(get("/stats.json")).asInstanceOf[Map[String, Map[String, AnyRef]]] - absStats("counters")("apples") mustEqual 16 - absStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 2 - namespaceStats = Json.parse(get("/stats.json?namespace=monkey")) - .asInstanceOf[Map[String, Map[String, AnyRef]]] - namespaceStats("counters")("apples") mustEqual 6 - namespaceStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 1 - namespaceStats = Json.parse(get("/stats.json?namespace=monkey")) - .asInstanceOf[Map[String, Map[String, AnyRef]]] - namespaceStats("counters")("apples") mustEqual 0 - namespaceStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 0 - periodicStats = Json.parse(get("/stats.json?period=30")) - .asInstanceOf[Map[String, Map[String, AnyRef]]] - if (periodicStats("counters")("apples") == 6) { - // PeriodicBackgroundProcess aligns the first event to the multiple - // of the period + 1 so the first event can happen as soon as in two - // seconds. In the case of the first event already happens when we - // check the stats, we retry the test. - Stats.incr("apples", 8) - Stats.addMetric("oranges", 4) - periodicStats = Json.parse(get("/stats.json?period=30")) - .asInstanceOf[Map[String, Map[String, AnyRef]]] - periodicStats("counters")("apples") mustEqual 6 - periodicStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 1 - } else { - periodicStats("counters")("apples") mustEqual 10 - periodicStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") mustEqual 1 - } - } - - "in json, with histograms" in { - // make some statsy things happen - Stats.clearAll() - get("/stats.json") - Stats.addMetric("kangaroo_time", 1) - Stats.addMetric("kangaroo_time", 2) - Stats.addMetric("kangaroo_time", 3) - Stats.addMetric("kangaroo_time", 4) - Stats.addMetric("kangaroo_time", 5) - Stats.addMetric("kangaroo_time", 6) - - val stats = get("/stats.json") - val json = Json.parse(stats).asInstanceOf[Map[String, Map[String, AnyRef]]] - val timings = json("metrics")("kangaroo_time").asInstanceOf[Map[String, Int]] - - timings must haveKey("count") - timings("count") mustEqual 6 - - timings must haveKey("average") - timings("average") mustEqual 3 - - timings must haveKey("p50") - timings("p50") mustEqual 3 - - timings must haveKey("p99") - timings("p99") mustEqual 6 - - timings must haveKey("p999") - timings("p999") mustEqual 6 - - timings must haveKey("p9999") - timings("p9999") mustEqual 6 - } - - "in json, with histograms and reset" in { - Stats.clearAll() - // Add items indirectly to the histogram - Stats.addMetric("kangaroo_time", 1) - Stats.addMetric("kangaroo_time", 2) - Stats.addMetric("kangaroo_time", 3) - Stats.addMetric("kangaroo_time", 4) - Stats.addMetric("kangaroo_time", 5) - Stats.addMetric("kangaroo_time", 6) - - - val stats = get("/stats.json?reset") - val json = Json.parse(stats).asInstanceOf[Map[String, Map[String, AnyRef]]] - val timings = json("metrics")("kangaroo_time").asInstanceOf[Map[String, Int]] - - timings must haveKey("count") - timings("count") 
mustEqual 6 - - timings must haveKey("average") - timings("average") mustEqual 3 - - timings must haveKey("p50") - timings("p50") mustEqual 3 - - timings must haveKey("p95") - timings("p95") mustEqual 6 - - timings must haveKey("p99") - timings("p99") mustEqual 6 - - timings must haveKey("p999") - timings("p999") mustEqual 6 - - timings must haveKey("p9999") - timings("p9999") mustEqual 6 - } - - "in json, with callback" in { - val stats = get("/stats.json?callback=true") - stats.startsWith("ostrichCallback(") mustBe true - stats.endsWith(")") mustBe true - } - - "in json, with named callback" in { - val stats = get("/stats.json?callback=My.Awesome.Callback") - stats.startsWith("My.Awesome.Callback(") mustBe true - stats.endsWith(")") mustBe true - } - - "in json, with empty callback" in { - val stats = get("/stats.json?callback=") - stats.startsWith("ostrichCallback(") mustBe true - stats.endsWith(")") mustBe true - } - - "in text" in { - // make some statsy things happen - Stats.clearAll() - Stats.time("kangaroo_time") { Stats.incr("kangaroos", 1) } - - get("/stats.txt") must beMatching(" kangaroos: 1") - } - } - - "return 400 for /stats when collection period is below minimum" in { - get("/stats.json?period=10") must throwA[Exception] - } - - "parse parameters" in { - "uri" | "result" |> - "/p" ! Nil | - "/p?a=b" ! ("a", "b") :: Nil | - "/p?a=b&c=d" ! ("a", "b") :: ("c", "d") :: Nil | - "/p?" ! Nil | - "/p?invalid" ! Nil | - "/p?a=" ! ("a", "") :: Nil | - "/p?=b" ! ("", "b") :: Nil | { (uriStr, result) => - CgiRequestHandler.uriToParameters(new URI(uriStr)) mustEqual result - } - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceTest.scala b/src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceTest.scala new file mode 100644 index 00000000..9041955a --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/admin/AdminHttpServiceTest.scala @@ -0,0 +1,434 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.twitter.ostrich.admin + +import com.twitter.conversions.time._ +import com.twitter.json.Json +import com.twitter.logging.{Level, Logger} +import com.twitter.ostrich.stats.{Stats, StatsListener} +import java.net.{Socket, SocketException, URI, URL} +import java.util.regex.Pattern +import org.junit.runner.RunWith +import org.scalatest.{BeforeAndAfter, FunSuite} +import org.scalatest.concurrent.{Eventually, IntegrationPatience} +import org.scalatest.junit.JUnitRunner +import org.scalatest.prop.TableDrivenPropertyChecks +import scala.collection.JavaConverters._ +import scala.io.Source + +@RunWith(classOf[JUnitRunner]) +class AdminHttpServiceTest extends FunSuite with BeforeAndAfter + with TableDrivenPropertyChecks + with Eventually + with IntegrationPatience { + + class Context {} + + def get(path: String): String = { + val port = service.address.getPort + val url = new URL(f"http://localhost:$port%d$path%s") + Source.fromURL(url).getLines().mkString("\n") + } + + def getHeaders(path: String): Map[String, List[String]] = { + val port = service.address.getPort + val url = new URL(f"http://localhost:$port%d$path%s") + url.openConnection().getHeaderFields.asScala.toMap.mapValues { _.asScala.toList } + } + + var service: AdminHttpService = null + + before { + service = + new AdminHttpService( + 0, + 20, + Stats, + new RuntimeEnvironment(getClass), + 30.seconds, + { code => /* system-exit is a noop here */ } + ) + service.start() + } + + after { + Stats.clearAll() + StatsListener.clearAll() + service.shutdown() + } + + test("FolderResourceHandler") { + val staticHandler = new FolderResourceHandler("/nested") + + info("split a URI") + assert(staticHandler.getRelativePath("/nested/1level.txt") === "1level.txt") + assert(staticHandler.getRelativePath("/nested/2level/2level.txt") === "2level/2level.txt") + + + info("build paths correctly") + assert(staticHandler.buildPath("1level.txt") === "/nested/1level.txt") + assert(staticHandler.buildPath("2level/2level.txt") === "/nested/2level/2level.txt") + + info("load resources") + intercept[Exception] { staticHandler.loadResource("nested/1level.txt") } + try { + staticHandler.loadResource("/nested/1level.txt") + } catch { + case e: Exception => fail("staticHandler should not throw an exception") + } + } + + test("static resources") { + new Context { + info("drawgraph.js") + val inputStream = getClass.getResourceAsStream("/static/drawgraph.js") + assert(inputStream !== null) + assert(Source.fromInputStream(inputStream).mkString !== null) + } + + new Context { + info("unnested") + val inputStream = getClass.getResourceAsStream("/unnested.txt") + assert(Pattern.matches("we are not nested", Source.fromInputStream(inputStream).getLines.mkString)) + } + + new Context { + info("1 level of nesting") + val inputStream = getClass.getResourceAsStream("/nested/1level.txt") + assert(Pattern.matches("nested one level deep", Source.fromInputStream(inputStream).getLines.mkString)) + } + + new Context { + info("2 levels of nesting") + val inputStream = getClass.getResourceAsStream("/nested/2levels/2levels.txt") + assert(Pattern.matches("nested two levels deep", Source.fromInputStream(inputStream).getLines.mkString)) + } + } + + test("start and stop") { + val port = service.address.getPort + assert(new Socket("localhost", port) !== null) + service.shutdown() + intercept[SocketException] { new Socket("localhost", port) } + } + + test("answer pings") { + val port = service.address.getPort + val socket = new Socket("localhost", port) + 
assert(get("/ping.json").trim === """{"response":"pong"}""") + + service.shutdown() + intercept[SocketException] { new Socket("localhost", port) } + } + + test("shutdown") { + val port = service.address.getPort + get("/shutdown.json") + eventually { + intercept[SocketException] { new Socket("localhost", port) } + } + } + + test("quiesce") { + val port = service.address.getPort + get("/quiesce.json") + eventually { + intercept[SocketException] { new Socket("localhost", port) } + } + } + + test("get a proper web page back for the report URL") { + assert(get("/report/").contains("Stats Report")) + } + + test("return 404 for favicon") { + intercept[java.io.FileNotFoundException] { get("/favicon.ico") } + } + + test("return 404 for a missing command") { + intercept[java.io.FileNotFoundException] { get("/bullshit.json") } + } + + test("not crash when fetching /") { + assert(get("/").contains("ostrich")) + } + + test("tell us its ostrich version in the headers") { + assert(getHeaders("/").get("X-ostrich-version").isInstanceOf[Some[List[String]]]) + } + + test("server info") { + val serverInfo = get("/server_info.json") + assert(serverInfo.contains("\"build\":")) + assert(serverInfo.contains("\"build_revision\":")) + assert(serverInfo.contains("\"name\":")) + assert(serverInfo.contains("\"version\":")) + assert(serverInfo.contains("\"start_time\":")) + assert(serverInfo.contains("\"uptime\":")) + } + + test("change log levels") { + // Add a logger with a very specific name + val name = "logger-" + System.currentTimeMillis + val logger = Logger.get(name) // register this logger + logger.setLevel(Level.INFO) + + // no levels specified + var logLevels = get("/logging") + assert(logLevels.contains(name)) + assert(logLevels.contains("Specify a logger name and level")) + + // specified properly + logLevels = get("/logging?name=%s&level=FATAL".format(name)) + assert(Logger.get(name).getLevel() === Level.FATAL) + assert(logLevels.contains("Successfully changed the level of the following logger")) + + // made up level + logLevels = get("/logging?name=%s&level=OHEMGEE".format(name)) + assert(logLevels.contains("Logging level change failed")) + + // made up logger + logLevels = get("/logging?name=OHEMGEEWHYAREYOUUSINGTHISLOGGERNAME&level=INFO") + assert(logLevels.contains("Logging level change failed")) + } + + test("fetch static files") { + assert(get("/static/drawgraph.js").contains("drawChart")) + } + + test("mesos health") { + assert(get("/health").contains("OK")) + } + + test("mesos abortabortabort") { + val port = service.address.getPort + get("/abortabortabort") + eventually { + intercept[SocketException] { new Socket("localhost", port) } + } + } + + test("mesos quitquitquit") { + val port = service.address.getPort + get("/quitquitquit") + eventually { + intercept[SocketException] { new Socket("localhost", port) } + } + } + + test("thread contention") { + val prof = get("/contention.json") + assert(prof.contains("\"blocked_threads\":")) + } + + test("provide stats") { + new Context { + info("in json") + // make some statsy things happen + Stats.clearAll() + Stats.time("kangaroo_time") { Stats.incr("kangaroos", 1) } + + val stats = Json.parse(get("/stats.json")).asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(stats("gauges").get("jvm_uptime").isDefined) + assert(stats("gauges").get("jvm_heap_used").isDefined) + assert(stats("counters").get("kangaroos").isDefined) + assert(stats("metrics").get("kangaroo_time_msec").isDefined) + + val timing = 
stats("metrics")("kangaroo_time_msec").asInstanceOf[Map[String, Int]] + assert(timing("count") === 1) + assert(timing("minimum") >= 0) + assert(timing("maximum") >= timing("minimum")) + } + + new Context { + info("in json, with custom listeners") + Stats.clearAll() + Stats.incr("apples", 10) + Stats.addMetric("oranges", 5) + + var absStats = Json.parse(get("/stats.json")).asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(absStats("counters")("apples") === 10) + assert(absStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 1) + var namespaceStats = Json.parse(get("/stats.json?namespace=monkey")) + .asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(namespaceStats("counters")("apples") === 10) + assert(namespaceStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 1) + var periodicStats = Json.parse(get("/stats.json?period=30")) + .asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(periodicStats("counters")("apples") === 10) + assert(periodicStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 1) + + Stats.incr("apples", 6) + Stats.addMetric("oranges", 3) + absStats = Json.parse(get("/stats.json")).asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(absStats("counters")("apples") === 16) + assert(absStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 2) + namespaceStats = Json.parse(get("/stats.json?namespace=monkey")) + .asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(namespaceStats("counters")("apples") === 6) + assert(namespaceStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 1) + namespaceStats = Json.parse(get("/stats.json?namespace=monkey")) + .asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(namespaceStats("counters")("apples") === 0) + assert(namespaceStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 0) + periodicStats = Json.parse(get("/stats.json?period=30")) + .asInstanceOf[Map[String, Map[String, AnyRef]]] + if (periodicStats("counters")("apples") == 6) { + // PeriodicBackgroundProcess aligns the first event to the multiple + // of the period + 1 so the first event can happen as soon as in two + // seconds. In the case of the first event already happens when we + // check the stats, we retry the test. 
+ Stats.incr("apples", 8) + Stats.addMetric("oranges", 4) + periodicStats = Json.parse(get("/stats.json?period=30")) + .asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(periodicStats("counters")("apples") === 6) + assert(periodicStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 1) + } else { + assert(periodicStats("counters")("apples") === 10) + assert(periodicStats("metrics")("oranges").asInstanceOf[Map[String, AnyRef]]("count") === 1) + } + } + + new Context { + info("in json, with histograms") + // make some statsy things happen + Stats.clearAll() + get("/stats.json") + Stats.addMetric("kangaroo_time", 1) + Stats.addMetric("kangaroo_time", 2) + Stats.addMetric("kangaroo_time", 3) + Stats.addMetric("kangaroo_time", 4) + Stats.addMetric("kangaroo_time", 5) + Stats.addMetric("kangaroo_time", 6) + + val stats = get("/stats.json") + val json = Json.parse(stats).asInstanceOf[Map[String, Map[String, AnyRef]]] + val timings = json("metrics")("kangaroo_time").asInstanceOf[Map[String, Int]] + + assert(timings.get("count").isDefined) + assert(timings("count") === 6) + + assert(timings.get("average").isDefined) + assert(timings("average") === 3) + + assert(timings.get("p50").isDefined) + assert(timings("p50") === 3) + + assert(timings.get("p99").isDefined) + assert(timings("p99") === 6) + + assert(timings.get("p999").isDefined) + assert(timings("p999") === 6) + + assert(timings.get("p9999").isDefined) + assert(timings("p9999") === 6) + } + + new Context { + info("in json, with histograms and reset") + Stats.clearAll() + // Add items indirectly to the histogram + Stats.addMetric("kangaroo_time", 1) + Stats.addMetric("kangaroo_time", 2) + Stats.addMetric("kangaroo_time", 3) + Stats.addMetric("kangaroo_time", 4) + Stats.addMetric("kangaroo_time", 5) + Stats.addMetric("kangaroo_time", 6) + + + val stats = get("/stats.json?reset") + val json = Json.parse(stats).asInstanceOf[Map[String, Map[String, AnyRef]]] + val timings = json("metrics")("kangaroo_time").asInstanceOf[Map[String, Int]] + + assert(timings.get("count").isDefined) + assert(timings("count") === 6) + + assert(timings.get("average").isDefined) + assert(timings("average") === 3) + + assert(timings.get("p50").isDefined) + assert(timings("p50") === 3) + + assert(timings.get("p95").isDefined) + assert(timings("p95") === 6) + + assert(timings.get("p99").isDefined) + assert(timings("p99") === 6) + + assert(timings.get("p999").isDefined) + assert(timings("p999") === 6) + + assert(timings.get("p9999").isDefined) + assert(timings("p9999") === 6) + } + + + new Context { + info("in json, with callback") + val stats = get("/stats.json?callback=true") + assert(stats.startsWith("ostrichCallback(")) + assert(stats.endsWith(")")) + } + + new Context { + info("in json, with named callback") + val stats = get("/stats.json?callback=My.Awesome.Callback") + assert(stats.startsWith("My.Awesome.Callback(")) + assert(stats.endsWith(")")) + } + + new Context { + info("in json, with empty callback") + val stats = get("/stats.json?callback=") + assert(stats.startsWith("ostrichCallback(")) + assert(stats.endsWith(")")) + } + + new Context { + info("in text") + // make some statsy things happen + Stats.clearAll() + Stats.time("kangaroo_time") { Stats.incr("kangaroos", 1) } + + assert(get("/stats.txt").contains(" kangaroos: 1")) + } + } + + test("return 400 for /stats when collection period is below minimum") { + intercept[Exception] { get("/stats.json?period=10") } + } + + test("parse parameters") { + val parametersTable = + Table( + 
("uri", "result"), + ("/p", Nil), + ("/p?a=b", ("a", "b") :: Nil), + ("/p?a=b&c=d", ("a", "b") :: ("c", "d") :: Nil), + ("/p?", Nil), + ("/p?invalid", Nil), + ("/p?a=", ("a", "") :: Nil), + ("/p?=b", ("", "b") :: Nil) + ) + + forAll (parametersTable) { (uriStr: String, result: List[(String, String)]) => + assert(CgiRequestHandler.uriToParameters(new URI(uriStr)) === result) + } + } + +} diff --git a/src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentSpec.scala b/src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentSpec.scala deleted file mode 100644 index 53fe4ce4..00000000 --- a/src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentSpec.scala +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2010 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.twitter.ostrich -package admin - -import com.twitter.io.TempFile -import org.specs.SpecificationWithJUnit -import stats.Histogram - -class RuntimeEnvironmentSpec extends SpecificationWithJUnit { - val config = TempFile.fromResourcePath("/config.scala").getAbsolutePath - - "RuntimeEnvironment" should { - "find executable jar path" in { - val runtime = new RuntimeEnvironment(classOf[Histogram]) - runtime.findCandidateJar(List("./dist/flockdb/flockdb-1.4.1.jar"), "flockdb", "1.4.1") mustEqual - Some("./dist/flockdb/flockdb-1.4.1.jar") - runtime.findCandidateJar(List("./dist/flockdb/flockdb_2.7.7-1.4.1.jar"), "flockdb", "1.4.1") mustEqual - Some("./dist/flockdb/flockdb_2.7.7-1.4.1.jar") - runtime.findCandidateJar(List("./dist/flockdb/wrong-1.4.1.jar"), "flockdb", "1.4.1") mustEqual - None - runtime.findCandidateJar(List("./dist/flockdb/flockdb-1.4.1-SNAPSHOT.jar"), "flockdb", "1.4.1-SNAPSHOT") mustEqual - Some("./dist/flockdb/flockdb-1.4.1-SNAPSHOT.jar") - } - - "parse custom args" in { - val runtime = new RuntimeEnvironment(classOf[Object]) - System.getProperty("foo") mustBe null - runtime.parseArgs(List("-D", "foo=bar")) - runtime.arguments.get("foo") mustEqual Some("bar") - System.getProperty("foo") mustEqual "bar" - System.clearProperty("foo") // allow this test to be run multiple times - } - - "load a config" in { - val runtime = new RuntimeEnvironment(classOf[Object]) - runtime.parseArgs(List("-f", config)) - val res: String = runtime.loadConfig() - res mustEqual "foo" - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentTest.scala b/src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentTest.scala new file mode 100644 index 00000000..d68c4a63 --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/admin/RuntimeEnvironmentTest.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2010 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.admin + +import com.twitter.io.TempFile +import com.twitter.ostrich.stats.Histogram +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.FunSuite + +@RunWith(classOf[JUnitRunner]) +class RuntimeEnvironmentTest extends FunSuite { + + test("find executable jar path") { + val runtime = new RuntimeEnvironment(classOf[Histogram]) + assert(runtime.findCandidateJar(List("./dist/flockdb/flockdb-1.4.1.jar"), "flockdb", "1.4.1") === + Some("./dist/flockdb/flockdb-1.4.1.jar")) + assert(runtime.findCandidateJar(List("./dist/flockdb/flockdb_2.7.7-1.4.1.jar"), "flockdb", "1.4.1") === + Some("./dist/flockdb/flockdb_2.7.7-1.4.1.jar")) + assert(runtime.findCandidateJar(List("./dist/flockdb/wrong-1.4.1.jar"), "flockdb", "1.4.1") === + None) + assert(runtime.findCandidateJar(List("./dist/flockdb/flockdb-1.4.1-SNAPSHOT.jar"), "flockdb", "1.4.1-SNAPSHOT") === + Some("./dist/flockdb/flockdb-1.4.1-SNAPSHOT.jar")) + } + + test("parse custom args") { + val runtime = new RuntimeEnvironment(classOf[Object]) + assert(System.getProperty("foo") === null) + runtime.parseArgs(List("-D", "foo=bar")) + assert(runtime.arguments.get("foo") === Some("bar")) + assert(System.getProperty("foo") === "bar") + System.clearProperty("foo") // allow this test to be run multiple times + } + + test("load a config") { + val config = TempFile.fromResourcePath("/config.scala").getAbsolutePath + val runtime = new RuntimeEnvironment(classOf[Object]) + runtime.parseArgs(List("-f", config)) + val res: String = runtime.loadConfig() + assert(res === "foo") + } + +} diff --git a/src/test/scala/com/twitter/ostrich/admin/ServiceTrackerSpec.scala b/src/test/scala/com/twitter/ostrich/admin/ServiceTrackerSpec.scala deleted file mode 100644 index 34b34568..00000000 --- a/src/test/scala/com/twitter/ostrich/admin/ServiceTrackerSpec.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich -package admin - -import java.net.{Socket, SocketException, URL} -import scala.io.Source -import com.twitter.json.Json -import com.twitter.logging.{Level, Logger} -import org.specs.SpecificationWithJUnit -import org.specs.mock.JMocker -import stats.Stats - -class ServiceTrackerSpec extends SpecificationWithJUnit with JMocker { - "ServiceTracker" should { - val service = mock[Service] - - doBefore { - ServiceTracker.clearForTests() - } - - doAfter { - ServiceTracker.clearForTests() - } - - "shutdown" in { - ServiceTracker.register(service) - expect { one(service).shutdown() } - ServiceTracker.shutdown() - } - - "quiesce" in { - ServiceTracker.register(service) - expect { one(service).quiesce() } - ServiceTracker.quiesce() - } - - "reload" in { - ServiceTracker.register(service) - expect { one(service).reload() } - ServiceTracker.reload() - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/admin/ServiceTrackerTest.scala b/src/test/scala/com/twitter/ostrich/admin/ServiceTrackerTest.scala new file mode 100644 index 00000000..5d52e56b --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/admin/ServiceTrackerTest.scala @@ -0,0 +1,61 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.admin + +import com.twitter.json.Json +import com.twitter.logging.{Level, Logger} +import com.twitter.ostrich.stats.Stats +import java.net.{Socket, SocketException, URL} +import org.junit.runner.RunWith +import org.mockito.Mockito.{verify, times} +import org.scalatest.{BeforeAndAfter, FunSuite} +import org.scalatest.junit.JUnitRunner +import org.scalatest.mock.MockitoSugar +import scala.io.Source + +@RunWith(classOf[JUnitRunner]) +class ServiceTrackerTest extends FunSuite with BeforeAndAfter with MockitoSugar { + + val service = mock[Service] + + before { + ServiceTracker.clearForTests() + } + + after { + ServiceTracker.clearForTests() + } + + test("shutdown") { + ServiceTracker.register(service) + ServiceTracker.shutdown() + verify(service, times(1)).shutdown() + } + + test("quiesce") { + ServiceTracker.register(service) + ServiceTracker.quiesce() + verify(service, times(1)).quiesce() + } + + test("reload") { + ServiceTracker.register(service) + ServiceTracker.reload() + verify(service, times(1)).reload() + } + +} diff --git a/src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorSpec.scala b/src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorSpec.scala deleted file mode 100644 index 2c21ac4c..00000000 --- a/src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorSpec.scala +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright 2010 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.twitter.ostrich -package admin - -import java.net.URL -import scala.collection.immutable -import scala.io.Source -import com.twitter.conversions.string._ -import com.twitter.conversions.time._ -import com.twitter.json.Json -import com.twitter.util.Time -import org.specs.SpecificationWithJUnit -import stats.Stats - -class TimeSeriesCollectorSpec extends SpecificationWithJUnit { - "TimeSeriesCollector" should { - var collector: TimeSeriesCollector = null - - doBefore { - Stats.clearAll() - collector = new TimeSeriesCollector() - } - - doAfter { - collector.shutdown() - } - - def getJson(port: Int, path: String) = { - val url = new URL("http://localhost:%d%s".format(port, path)) - Json.parse(Source.fromURL(url).getLines.mkString("\n")) - } - - "Stats.incr" in { - Time.withCurrentTimeFrozen { time => - Stats.incr("cats") - Stats.incr("dogs", 3) - collector.collector.periodic() - time.advance(1.minute) - Stats.incr("dogs", 60000) - collector.collector.periodic() - - val json = collector.get("counter:dogs", Nil) - val data = Json.parse(json).asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("counter:dogs")(57) mustEqual List(2.minutes.ago.inSeconds, 0) - data("counter:dogs")(58) mustEqual List(1.minute.ago.inSeconds, 3) - data("counter:dogs")(59) mustEqual List(Time.now.inSeconds, 60000) - } - } - - "Stats.getCounter().update" in { - Time.withCurrentTimeFrozen { time => - Stats.getCounter("whales.tps").incr(10) - collector.collector.periodic() - time.advance(1.minute) - Stats.getCounter("whales.tps").incr(5) - collector.collector.periodic() - - val json = collector.get("counter:whales.tps", Nil) - val data = Json.parse(json).asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("counter:whales.tps")(57) mustEqual List(2.minutes.ago.inSeconds, 0) - data("counter:whales.tps")(58) mustEqual List(1.minute.ago.inSeconds, 10) - data("counter:whales.tps")(59) mustEqual List(Time.now.inSeconds, 5) - } - } - - "Stats.getCounter saved in variable" in { - val whales = Stats.getCounter("whales.tps") - Time.withCurrentTimeFrozen { time => - whales.incr(10) - collector.collector.periodic() - time.advance(1.minute) - whales.incr(5) - collector.collector.periodic() - - val json = collector.get("counter:whales.tps", Nil) - val data = Json.parse(json).asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("counter:whales.tps")(57) mustEqual List(2.minutes.ago.inSeconds, 0) - data("counter:whales.tps")(58) mustEqual List(1.minute.ago.inSeconds, 10) - data("counter:whales.tps")(59) mustEqual List(Time.now.inSeconds, 5) - } - } - - "fetch json via http" in { - Time.withCurrentTimeFrozen { time => - Stats.incr("cats") - Stats.incr("dogs", 3) - collector.collector.periodic() - time.advance(1.minute) - Stats.incr("dogs", 1) - collector.collector.periodic() - - val service = new AdminHttpService(0, 20, Stats, new RuntimeEnvironment(getClass)) - collector.registerWith(service) - service.start() - val port = service.address.getPort - try { - val keys = getJson(port, "/graph_data").asInstanceOf[Map[String, Seq[String]]] - keys("keys") mustContain "counter:dogs" - keys("keys") mustContain 
"counter:cats" - val data = getJson(port, "/graph_data/counter:dogs").asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("counter:dogs")(57) mustEqual List(2.minutes.ago.inSeconds, 0) - data("counter:dogs")(58) mustEqual List(1.minute.ago.inSeconds, 3) - data("counter:dogs")(59) mustEqual List(Time.now.inSeconds, 1) - } finally { - service.shutdown() - } - } - } - - "fetch specific timing percentiles" in { - Time.withCurrentTimeFrozen { time => - Stats.addMetric("run", 5) - Stats.addMetric("run", 10) - Stats.addMetric("run", 15) - Stats.addMetric("run", 20) - collector.collector.periodic() - - val service = new AdminHttpService(0, 20, Stats, new RuntimeEnvironment(getClass)) - collector.registerWith(service) - service.start() - val port = service.address.getPort - try { - var data = getJson(port, "/graph_data/metric:run").asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("metric:run")(59) mustEqual List(Time.now.inSeconds, 5, 10, 15, 19, 19, 19, 19, 19) - data = getJson(port, "/graph_data/metric:run?p=0,2").asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("metric:run")(59) mustEqual List(Time.now.inSeconds, 5, 15) - data = getJson(port, "/graph_data/metric:run?p=1,7").asInstanceOf[Map[String, Seq[Seq[Number]]]] - data("metric:run")(59) mustEqual List(Time.now.inSeconds, 10, 19) - } finally { - service.shutdown() - } - } - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorTest.scala b/src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorTest.scala new file mode 100644 index 00000000..c133233e --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/admin/TimeSeriesCollectorTest.scala @@ -0,0 +1,151 @@ +/* + * Copyright 2010 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.twitter.ostrich.admin + +import com.twitter.conversions.string._ +import com.twitter.conversions.time._ +import com.twitter.json.Json +import com.twitter.ostrich.stats.Stats +import com.twitter.util.Time +import java.net.URL +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.{FunSuite, BeforeAndAfter} +import scala.io.Source + +@RunWith(classOf[JUnitRunner]) +class TimeSeriesCollectorTest extends FunSuite with BeforeAndAfter { + + var collector: TimeSeriesCollector = null + + before { + Stats.clearAll() + collector = new TimeSeriesCollector() + } + + after { + collector.shutdown() + } + + def getJson(port: Int, path: String) = { + val url = new URL("http://localhost:%d%s".format(port, path)) + Json.parse(Source.fromURL(url).getLines.mkString("\n")) + } + + test("Stats.incr") { + Time.withCurrentTimeFrozen { time => + Stats.incr("cats") + Stats.incr("dogs", 3) + collector.collector.periodic() + time.advance(1.minute) + Stats.incr("dogs", 60000) + collector.collector.periodic() + + val json = collector.get("counter:dogs", Nil) + val data = Json.parse(json).asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("counter:dogs")(57) === List(2.minutes.ago.inSeconds, 0)) + assert(data("counter:dogs")(58) === List(1.minute.ago.inSeconds, 3)) + assert(data("counter:dogs")(59) === List(Time.now.inSeconds, 60000)) + } + } + + test("Stats.getCounter().update") { + Time.withCurrentTimeFrozen { time => + Stats.getCounter("whales.tps").incr(10) + collector.collector.periodic() + time.advance(1.minute) + Stats.getCounter("whales.tps").incr(5) + collector.collector.periodic() + + val json = collector.get("counter:whales.tps", Nil) + val data = Json.parse(json).asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("counter:whales.tps")(57) === List(2.minutes.ago.inSeconds, 0)) + assert(data("counter:whales.tps")(58) === List(1.minute.ago.inSeconds, 10)) + assert(data("counter:whales.tps")(59) === List(Time.now.inSeconds, 5)) + } + } + + test("Stats.getCounter saved in variable") { + val whales = Stats.getCounter("whales.tps") + Time.withCurrentTimeFrozen { time => + whales.incr(10) + collector.collector.periodic() + time.advance(1.minute) + whales.incr(5) + collector.collector.periodic() + + val json = collector.get("counter:whales.tps", Nil) + val data = Json.parse(json).asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("counter:whales.tps")(57) === List(2.minutes.ago.inSeconds, 0)) + assert(data("counter:whales.tps")(58) === List(1.minute.ago.inSeconds, 10)) + assert(data("counter:whales.tps")(59) === List(Time.now.inSeconds, 5)) + } + } + + test("fetch json via http") { + Time.withCurrentTimeFrozen { time => + Stats.incr("cats") + Stats.incr("dogs", 3) + collector.collector.periodic() + time.advance(1.minute) + Stats.incr("dogs", 1) + collector.collector.periodic() + + val service = new AdminHttpService(0, 20, Stats, new RuntimeEnvironment(getClass)) + collector.registerWith(service) + service.start() + val port = service.address.getPort + try { + val keys = getJson(port, "/graph_data").asInstanceOf[Map[String, Seq[String]]] + keys("keys").contains("counter:dogs") + keys("keys").contains("counter:cats") + val data = getJson(port, "/graph_data/counter:dogs").asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("counter:dogs")(57) === List(2.minutes.ago.inSeconds, 0)) + assert(data("counter:dogs")(58) === List(1.minute.ago.inSeconds, 3)) + assert(data("counter:dogs")(59) === List(Time.now.inSeconds, 1)) + } finally { + 
service.shutdown() + } + } + } + + test("fetch specific timing percentiles") { + Time.withCurrentTimeFrozen { time => + Stats.addMetric("run", 5) + Stats.addMetric("run", 10) + Stats.addMetric("run", 15) + Stats.addMetric("run", 20) + collector.collector.periodic() + + val service = new AdminHttpService(0, 20, Stats, new RuntimeEnvironment(getClass)) + collector.registerWith(service) + service.start() + val port = service.address.getPort + try { + var data = getJson(port, "/graph_data/metric:run").asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("metric:run")(59) === List(Time.now.inSeconds, 5, 10, 15, 19, 19, 19, 19, 19)) + data = getJson(port, "/graph_data/metric:run?p=0,2").asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("metric:run")(59) === List(Time.now.inSeconds, 5, 15)) + data = getJson(port, "/graph_data/metric:run?p=1,7").asInstanceOf[Map[String, Seq[Seq[Number]]]] + assert(data("metric:run")(59) === List(Time.now.inSeconds, 10, 19)) + } finally { + service.shutdown() + } + } + } + +} diff --git a/src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigSpec.scala b/src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigSpec.scala deleted file mode 100644 index 1f22861e..00000000 --- a/src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigSpec.scala +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich -package admin -package config - -import java.net.{Socket, SocketException, URL} -import scala.io.Source -import com.twitter.conversions.time._ -import com.twitter.json.Json -import com.twitter.logging.{Level, Logger} -import org.specs.SpecificationWithJUnit -import org.specs.mock.{ClassMocker, JMocker} -import stats.{JsonStatsLogger, Stats, StatsListener, W3CStatsLogger} - -class AdminServiceConfigSpec extends SpecificationWithJUnit with JMocker with ClassMocker { - val port = 9990 - var service: AdminHttpService = null - val runtime = mock[RuntimeEnvironment] - - "AdminServiceConfig" should { - doBefore { - Logger.get("").setLevel(Level.OFF) - } - - doAfter { - ServiceTracker.shutdown() - } - - // Flaky test, see https://jira.twitter.biz/browse/CSL-1004 - if (!sys.props.contains("SKIP_FLAKY")) - "start up" in { - expect { - one(runtime).arguments willReturn Map.empty[String, String] - } - - new Socket("localhost", port) must throwA[SocketException] - val config = new AdminServiceConfig { - httpPort = 9990 - } - val service = config()(runtime) - new Socket("localhost", port) must notBeNull - ServiceTracker.shutdown() - new Socket("localhost", port) must throwA[SocketException] - } - - // Flaky test, see https://jira.twitter.biz/browse/CSL-1004 - if (!sys.props.contains("SKIP_FLAKY")) - "configure a json stats logger" in { - expect { - one(runtime).arguments willReturn Map.empty[String, String] - } - - val config = new AdminServiceConfig { - httpPort = 9990 - statsNodes = new StatsConfig { - reporters = new JsonStatsLoggerConfig { - loggerName = "json" - period = 1.second - serviceName = "hello" - } :: new TimeSeriesCollectorConfig() - } - } - val service = config()(runtime) - ServiceTracker.peek must exist { s => - s.isInstanceOf[JsonStatsLogger] && s.asInstanceOf[JsonStatsLogger].serviceName == Some("hello") - } - ServiceTracker.peek must exist { s => - s.isInstanceOf[TimeSeriesCollector] - } - } - - // Flaky test, see https://jira.twitter.biz/browse/CSL-1004 - if (!sys.props.contains("SKIP_FLAKY")) - "configure a w3c stats logger" in { - expect { - one(runtime).arguments willReturn Map.empty[String, String] - } - - val config = new AdminServiceConfig { - httpPort = 9990 - statsNodes = new StatsConfig { - reporters = new W3CStatsLoggerConfig { - loggerName = "w3c" - period = 1.second - } - } - } - val service = config()(runtime) - ServiceTracker.peek must exist { s => - s.isInstanceOf[W3CStatsLogger] && s.asInstanceOf[W3CStatsLogger].logger.name == "w3c" - } - } - - "configure filtered stats" in { - Stats.clearAll() - StatsListener.clearAll() - - expect { - one(runtime).arguments willReturn Map.empty[String, String] - } - - val config = new AdminServiceConfig { - httpPort = 0 - statsFilters = List("a.*".r, "jvm_.*".r) - } - val service = config()(runtime).get - - try { - Stats.incr("apples", 10) - Stats.addMetric("oranges", 5) - - val port = service.address.getPort - val path = "/stats.json?period=60&filtered=1" - val url = new URL("http://localhost:%s%s".format(port, path)) - val data = Source.fromURL(url).getLines().mkString("\n") - val json = Json.parse(data).asInstanceOf[Map[String, Map[String, AnyRef]]] - json("counters") mustEqual Map() - } finally { - service.shutdown() - } - } - - } -} diff --git a/src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigTest.scala b/src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigTest.scala new file mode 100644 index 00000000..aede985b --- /dev/null +++ 
b/src/test/scala/com/twitter/ostrich/admin/config/AdminServiceConfigTest.scala @@ -0,0 +1,159 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.admin.config + +import com.twitter.conversions.time._ +import com.twitter.json.Json +import com.twitter.logging.{Level, Logger} +import com.twitter.ostrich.admin.{ + AdminHttpService, + RuntimeEnvironment, + ServiceTracker, + TimeSeriesCollector +} +import com.twitter.ostrich.stats.{JsonStatsLogger, Stats, StatsListener, W3CStatsLogger} +import java.net.{Socket, SocketException, URL} +import org.junit.runner.RunWith +import org.mockito.Mockito.{verify, times, when} +import org.scalatest.{BeforeAndAfter, FunSuite} +import org.scalatest.junit.JUnitRunner +import org.scalatest.mock.MockitoSugar +import scala.io.Source + +@RunWith(classOf[JUnitRunner]) +class AdminServiceConfigTest extends FunSuite with BeforeAndAfter with MockitoSugar { + + class Context { + val port = 9990 + var service: AdminHttpService = null + val runtime = mock[RuntimeEnvironment] + when(runtime.arguments) thenReturn Map.empty[String, String] + } + + before { + Logger.get("").setLevel(Level.OFF) + } + + after { + ServiceTracker.shutdown() + } + + // Flaky test, see https://jira.twitter.biz/browse/CSL-1004 + if (!sys.props.contains("SKIP_FLAKY")) + test("start up") { + val context = new Context + import context._ + + intercept[SocketException] { + new Socket("localhost", port) + } + + val config = new AdminServiceConfig { + httpPort = 9990 + } + val service = config()(runtime) + assert(new Socket("localhost", port) !== null) + ServiceTracker.shutdown() + + intercept[SocketException] { + new Socket("localhost", port) + } + + verify(runtime, times(1)).arguments + } + + // Flaky test, see https://jira.twitter.biz/browse/CSL-1004 + if (!sys.props.contains("SKIP_FLAKY")) + test("configure a json stats logger") { + val context = new Context + import context._ + + val config = new AdminServiceConfig { + httpPort = 9990 + statsNodes = new StatsConfig { + reporters = new JsonStatsLoggerConfig { + loggerName = "json" + period = 1.second + serviceName = "hello" + } :: new TimeSeriesCollectorConfig() + } + } + + val service = config()(runtime) + val jsonStatsLoggerConfig = ServiceTracker.peek.find(_.isInstanceOf[JsonStatsLogger]) + assert(jsonStatsLoggerConfig.isDefined && + jsonStatsLoggerConfig.get.asInstanceOf[JsonStatsLogger].serviceName === Some("hello")) + assert(ServiceTracker.peek.find(_.isInstanceOf[TimeSeriesCollector]).isDefined) + + verify(runtime, times(1)).arguments + } + + // Flaky test, see https://jira.twitter.biz/browse/CSL-1004 + if (!sys.props.contains("SKIP_FLAKY")) + test("configure a w3c stats logger") { + val context = new Context + import context._ + + val config = new AdminServiceConfig { + httpPort = 9990 + statsNodes = new StatsConfig { + reporters = new W3CStatsLoggerConfig { + loggerName = "w3c" + period = 1.second + } + } + } + val service = config()(runtime) + val 
w3cStatsLoggerConfig = ServiceTracker.peek.find(_.isInstanceOf[W3CStatsLogger]) + assert(w3cStatsLoggerConfig.isDefined && + w3cStatsLoggerConfig.get.asInstanceOf[W3CStatsLogger].logger.name == "w3c") + + verify(runtime, times(1)).arguments + } + + test("configure filtered stats") { + val context = new Context + import context._ + + Stats.clearAll() + StatsListener.clearAll() + + val config = new AdminServiceConfig { + httpPort = 0 + statsFilters = List("a.*".r, "jvm_.*".r) + } + val service = config()(runtime).get + + try { + Stats.incr("apples", 10) + Stats.addMetric("oranges", 5) + + val port = service.address.getPort + val path = "/stats.json?period=60&filtered=1" + val url = new URL("http://localhost:%s%s".format(port, path)) + val data = Source.fromURL(url).getLines().mkString("\n") + val json = Json.parse(data).asInstanceOf[Map[String, Map[String, AnyRef]]] + assert(json("counters") === Map.empty[String, Map[String, AnyRef]]) + } finally { + service.shutdown() + } + + verify(runtime, times(1)).arguments + } + + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/DistributionSpec.scala b/src/test/scala/com/twitter/ostrich/stats/DistributionSpec.scala deleted file mode 100644 index 360adb4e..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/DistributionSpec.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2011 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.twitter.ostrich.stats - -import org.specs.SpecificationWithJUnit - -class DistributionSpec extends SpecificationWithJUnit { - "Distribution" should { - val histogram0 = Histogram() - val histogram1 = Histogram(10) - val histogram2 = Histogram(10, 20) - - "equals" in { - Distribution(histogram1.clone()) mustEqual Distribution(histogram1.clone()) - Distribution(histogram1) must not(beEqual(Distribution(histogram2))) - } - - "toMap" in { - Distribution(histogram2).toMap mustEqual - Map("count" -> 2, "maximum" -> 19, "minimum" -> 10, "average" -> 15, "sum" -> 30, - "p50" -> 10, "p90" -> 19, "p95" -> 19, "p99" -> 19, "p999" -> 19, "p9999" -> 19) - Distribution(histogram0).toMap mustEqual Map("count" -> 0) - } - - "toJson" in { - Distribution(histogram2).toJson mustEqual - "{\"average\":15,\"count\":2,\"maximum\":19,\"minimum\":10,\"p50\":10," + - "\"p90\":19,\"p95\":19,\"p99\":19,\"p999\":19,\"p9999\":19,\"sum\":30}" - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/DistributionTest.scala b/src/test/scala/com/twitter/ostrich/stats/DistributionTest.scala new file mode 100644 index 00000000..b899391e --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/DistributionTest.scala @@ -0,0 +1,59 @@ +/* + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.stats + +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.FunSuite + +@RunWith(classOf[JUnitRunner]) +class DistributionTest extends FunSuite { + + class Context { + val histogram0 = Histogram() + val histogram1 = Histogram(10) + val histogram2 = Histogram(10, 20) + } + + test("equals") { + val context = new Context + import context._ + + assert(Distribution(histogram1.clone()) === Distribution(histogram1.clone())) + assert(Distribution(histogram1) !== Distribution(histogram2)) + } + + test("toMap") { + val context = new Context + import context._ + + assert(Distribution(histogram2).toMap === + Map("count" -> 2, "maximum" -> 19, "minimum" -> 10, "average" -> 15, "sum" -> 30, + "p50" -> 10, "p90" -> 19, "p95" -> 19, "p99" -> 19, "p999" -> 19, "p9999" -> 19)) + assert(Distribution(histogram0).toMap === Map("count" -> 0)) + } + + test("toJson") { + val context = new Context + import context._ + + assert(Distribution(histogram2).toJson === + "{\"average\":15,\"count\":2,\"maximum\":19,\"minimum\":10,\"p50\":10," + + "\"p90\":19,\"p95\":19,\"p99\":19,\"p999\":19,\"p9999\":19,\"sum\":30}") + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerSpec.scala b/src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerSpec.scala deleted file mode 100644 index 2b4bd971..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerSpec.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich -package stats - -import com.twitter.conversions.time._ -import com.twitter.util.Time -import org.specs.SpecificationWithJUnit -import org.specs.SpecificationWithJUnit -import org.specs.mock.{ClassMocker, JMocker} -import java.net.Socket -import java.io.ByteArrayOutputStream - -class GraphiteStatsLoggerSpec extends SpecificationWithJUnit with JMocker with ClassMocker { - "GraphiteStatsLogger" should { - var out = new ByteArrayOutputStream - - val socket = mock[Socket] - - var collection: StatsCollection = null - var statsLogger: GraphiteStatsLogger = null - - doBefore { - expect { - atLeast(1).of(socket).getOutputStream willReturn out - one(socket).close - } - - collection = new StatsCollection() - statsLogger = new GraphiteStatsLogger("localhost", 1123, 1.second, "server_pool", None, collection) - } - - def getLines() = { - out.toString.split("\n").toList - } - - "log basic stats" in { - collection.incr("cats") - collection.incr("dogs", 3) - statsLogger.write(socket) - val lines = getLines().sorted - lines(0) must beMatching("server_pool\\.unknown\\.cats 1\\.00 [0-9]+") - lines(1) must beMatching("server_pool\\.unknown\\.dogs 3\\.00 [0-9]+") - } - - "log timings" in { - Time.withCurrentTimeFrozen { time => - collection.time("zzz") { time advance 10.milliseconds } - collection.time("zzz") { time advance 20.milliseconds } - statsLogger.write(socket) - val lines = getLines().sorted - lines(0) must beMatching("server_pool\\.unknown\\.zzz_msec_average 15\\.00 [0-9]+") - lines(7) must beMatching("server_pool\\.unknown\\.zzz_msec_p99 19\\.00 [0-9]+") - } - } - - "log gauges" in { - collection.setGauge("horse", 3.5) - collection.setGauge("cow", 123456789.0) - statsLogger.write(socket) - val lines = getLines().sorted - lines(0) must beMatching("server_pool\\.unknown\\.cow 123456789\\.00 [0-9]+") - lines(1) must beMatching("server_pool\\.unknown\\.horse 3\\.50 [0-9]+") - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerTest.scala b/src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerTest.scala new file mode 100644 index 00000000..192f9d84 --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/GraphiteStatsLoggerTest.scala @@ -0,0 +1,91 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.twitter.ostrich.stats + +import com.twitter.conversions.time._ +import com.twitter.util.Time +import java.net.Socket +import java.io.ByteArrayOutputStream +import java.util.regex.Pattern +import org.junit.runner.RunWith +import org.mockito.Mockito.{verify, times, when} +import org.scalatest.FunSuite +import org.scalatest.junit.JUnitRunner +import org.scalatest.mock.MockitoSugar + +@RunWith(classOf[JUnitRunner]) +class GraphiteStatsLoggerTest extends FunSuite with MockitoSugar { + + class Context { + var out = new ByteArrayOutputStream + val socket = mock[Socket] + + var collection: StatsCollection = null + var statsLogger: GraphiteStatsLogger = null + + when(socket.getOutputStream) thenReturn out + + collection = new StatsCollection() + statsLogger = new GraphiteStatsLogger("localhost", 1123, 1.second, "server_pool", None, collection) + + def getLines() = { + out.toString.split("\n").toList + } + } + + test("log basic stats") { + val context = new Context + import context._ + + collection.incr("cats") + collection.incr("dogs", 3) + statsLogger.write(socket) + val lines = getLines().sorted + assert(Pattern.matches("server_pool\\.unknown\\.cats 1\\.00 [0-9]+", lines(0))) + assert(Pattern.matches("server_pool\\.unknown\\.dogs 3\\.00 [0-9]+", lines(1))) + verify(socket, times(1)).close + } + + test("log timings") { + val context = new Context + import context._ + + Time.withCurrentTimeFrozen { time => + collection.time("zzz") { time advance 10.milliseconds } + collection.time("zzz") { time advance 20.milliseconds } + statsLogger.write(socket) + val lines = getLines().sorted + assert(Pattern.matches("server_pool\\.unknown\\.zzz_msec_average 15\\.00 [0-9]+", lines(0))) + assert(Pattern.matches("server_pool\\.unknown\\.zzz_msec_p99 19\\.00 [0-9]+", lines(7))) + } + verify(socket, times(1)).close + } + + test("log gauges") { + val context = new Context + import context._ + + collection.setGauge("horse", 3.5) + collection.setGauge("cow", 123456789.0) + statsLogger.write(socket) + val lines = getLines().sorted + assert(Pattern.matches("server_pool\\.unknown\\.cow 123456789\\.00 [0-9]+", lines(0))) + assert(Pattern.matches("server_pool\\.unknown\\.horse 3\\.50 [0-9]+", lines(1))) + verify(socket, times(1)).close + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/HistogramSpec.scala b/src/test/scala/com/twitter/ostrich/stats/HistogramSpec.scala deleted file mode 100644 index 130f4c61..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/HistogramSpec.scala +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Copyright 2010 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich.stats - -import org.specs.SpecificationWithJUnit -import org.specs.matcher.Matcher - -class HistogramSpec extends SpecificationWithJUnit { - "Histogram" should { - val histogram = new Histogram() - val histogram2 = new Histogram() - - doBefore { - histogram.clear() - histogram2.clear() - } - - "find the right bucket for various timings" in { - histogram.add(0) - histogram.get(true)(0) mustEqual 1 - histogram.add(Int.MaxValue) - histogram.get(true).last mustEqual 1 - histogram.add(1) - histogram.get(true)(1) mustEqual 1 // offset 2 - histogram.add(2) - histogram.get(true)(2) mustEqual 1 // offset 3 - histogram.add(10) - histogram.add(11) - histogram.get(true)(10) mustEqual 2 // offset 12 - } - - "add value buckets.last" in { - histogram.add(Histogram.buckets.last.toInt) - histogram.get(true).last mustEqual 1 - } - - "add value buckets.last+1" in { - histogram.add(Histogram.buckets.last.toInt + 1) - histogram.get(true).last mustEqual 1 - } - - "add value Int.MaxValue" in { - histogram.add(Int.MaxValue) - histogram.get(true).last mustEqual 1 - } - - "add value Int.MinValue" in { - histogram.add(Int.MinValue) - histogram.get(true).head mustEqual 1 - } - - "find histogram cutoffs for various percentages" in { - for (i <- 0 until 1000) { - histogram.add(i) - } - - case class shareABucketWith(n: Int) extends Matcher[Int]() { - def apply(v: => Int) = { - (Histogram.bucketIndex(n) == - Histogram.bucketIndex(v), - "%d and %d are in the same bucket".format(v, n), - "%d and %d are not in the same bucket".format(v, n)) - } - } - - histogram.getPercentile(0.0) must shareABucketWith(0) - histogram.getPercentile(0.5) must shareABucketWith(500) - histogram.getPercentile(0.9) must shareABucketWith(900) - histogram.getPercentile(0.99) must shareABucketWith(998) // 999 is a boundary - histogram.getPercentile(1.0) must shareABucketWith(1000) - } - - - "merge" in { - for (i <- 0 until 50) { - histogram.add(i * 10) - histogram2.add(i * 10) - } - val origTotal = histogram.count - histogram.merge(histogram2) - histogram.count mustEqual origTotal + histogram2.count - val stats = histogram.get(true) - val stats2 = histogram2.get(true) - for (i <- 0 until 50) { - val bucket = Histogram.bucketIndex(i * 10) - stats(bucket) mustEqual 2 * stats2(bucket) - } - } - - "clone" in { - for (i <- 0 until 50) { - histogram.add(i * 10) - } - val histClone = histogram.clone() - histogram.buckets.toList must containAll(histClone.buckets.toList) - histClone.buckets.toList must containAll(histogram.buckets.toList) - histogram.count mustEqual histClone.count - } - - "handle a very large timing" in { - histogram.add(Int.MaxValue) - histogram.getPercentile(0.0) mustEqual Int.MaxValue - histogram.getPercentile(0.1) mustEqual Int.MaxValue - histogram.getPercentile(0.9) mustEqual Int.MaxValue - histogram.getPercentile(1.0) mustEqual Int.MaxValue - } - - "handle an empty histogram" in { - histogram.getPercentile(0.0) mustEqual 0 - histogram.getPercentile(0.1) mustEqual 0 - histogram.getPercentile(0.9) mustEqual 0 - histogram.getPercentile(1.0) mustEqual 0 - } - - "track count and sum" in { - histogram.add(10) - histogram.add(15) - histogram.add(20) - histogram.add(20) - histogram.count mustEqual 4 - histogram.sum mustEqual 65 - } - - "getPercentile" in { - histogram.add(95) - // bucket covers [91, 99], midpoint is 95 - histogram.getPercentile(0.0) mustEqual 95 - histogram.getPercentile(0.5) mustEqual 95 - histogram.getPercentile(1.0) mustEqual 95 - } - - "getPercentile with no values" in { - 
histogram.getPercentile(0.0) mustEqual 0 - histogram.getPercentile(0.5) mustEqual 0 - histogram.getPercentile(1.0) mustEqual 0 - } - - "getPercentile with infinity" in { - histogram.add(Int.MaxValue) - histogram.getPercentile(0.5) mustEqual Int.MaxValue - } - - "minimum" in { - histogram.add(95) - histogram.minimum mustEqual 95 - } - - "minimum with no values" in { - histogram.minimum mustEqual 0 - } - - "minimum with infinity" in { - histogram.add(Int.MaxValue) - histogram.minimum mustEqual Int.MaxValue - } - - "maximum" in { - histogram.add(95) - histogram.maximum mustEqual 95 - } - - "maximum with no values" in { - histogram.maximum mustEqual 0 - } - - "maximum with infinity" in { - histogram.add(Int.MaxValue) - histogram.maximum mustEqual Int.MaxValue - } - - "equals" in { - histogram must beEqual(histogram2) - histogram.add(10) - histogram must not(beEqual(histogram2)) - histogram2.add(10) - histogram must beEqual(histogram2) - histogram.add(5) - histogram.add(10) - histogram2.add(15) - histogram must not(beEqual(histogram2)) - } - - "integer overflow shouldn't happen" in { - // This is equivalent of what's commented out below - val last = histogram.buckets.size - 1 - histogram.buckets(last) = Int.MaxValue - histogram.buckets(last - 1) = Int.MaxValue - histogram.count += 2L * Int.MaxValue - - // val n = Int.MaxValue - // val x = Histogram.buckets.last - // (1 to n) foreach { _ => - // histogram.add(x) - // histogram.add(x - 1) - // } - - histogram.getPercentile(0.1) must beGreaterThan(0) - } - - "Subtracting two histograms must never have negative count" in { - histogram.add(1) - histogram2.add(1) - histogram2.add(10) - - val h = (histogram - histogram2) - h.count mustEqual 0L - h.getPercentile(0.9999) mustEqual 0 - } - - "Subtracting two histograms must work" in { - val n = 10 - (1 to 2*n) foreach { i => histogram.add(i) } - (1 to n) foreach { i => histogram2.add(i) } - val histogram3 = new Histogram - (n+1 to 2*n) foreach { i => histogram3.add(i) } - - (histogram - histogram2) mustEqual histogram3 - (histogram2 - histogram) mustEqual (new Histogram) - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/HistogramTest.scala b/src/test/scala/com/twitter/ostrich/stats/HistogramTest.scala new file mode 100644 index 00000000..7f4d706a --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/HistogramTest.scala @@ -0,0 +1,309 @@ +/* + * Copyright 2010 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.twitter.ostrich.stats + +import org.junit.runner.RunWith +import org.scalatest.FunSuite +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.MatchResult +import org.scalatest.matchers.{Matcher, ShouldMatchers} + +@RunWith(classOf[JUnitRunner]) +class HistogramTest extends FunSuite with ShouldMatchers { + + class Context { + val histogram = new Histogram() + val histogram2 = new Histogram() + } + + test("find the right bucket for various timings") { + val context = new Context + import context._ + + histogram.add(0) + assert(histogram.get(true)(0) === 1) + histogram.add(Int.MaxValue) + assert(histogram.get(true).last === 1) + histogram.add(1) + assert(histogram.get(true)(1) === 1) // offset 2 + histogram.add(2) + assert(histogram.get(true)(2) === 1) // offset 3 + histogram.add(10) + histogram.add(11) + assert(histogram.get(true)(10) === 2) // offset 12 + } + + test("add value buckets.last") { + val context = new Context + import context._ + + histogram.add(Histogram.buckets.last.toInt) + assert(histogram.get(true).last === 1) + } + + test("add value buckets.last+1") { + val context = new Context + import context._ + + histogram.add(Histogram.buckets.last.toInt + 1) + assert(histogram.get(true).last === 1) + } + + test("add value Int.MaxValue") { + val context = new Context + import context._ + + histogram.add(Int.MaxValue) + assert(histogram.get(true).last === 1) + } + + test("add value Int.MinValue") { + val context = new Context + import context._ + + histogram.add(Int.MinValue) + assert(histogram.get(true).head === 1) + } + + test("find histogram cutoffs for various percentages") { + val context = new Context + import context._ + + for (i <- 0 until 1000) { + histogram.add(i) + } + + case class shareABucketWith(n: Int) extends Matcher[Int] { + def apply(v: Int) = { + MatchResult( + Histogram.bucketIndex(n) == + Histogram.bucketIndex(v), + "%d and %d are in the same bucket".format(v, n), + "%d and %d are not in the same bucket".format(v, n)) + } + } + + histogram.getPercentile(0.0) should shareABucketWith(0) + histogram.getPercentile(0.5) should shareABucketWith(500) + histogram.getPercentile(0.9) should shareABucketWith(900) + histogram.getPercentile(0.99) should shareABucketWith(998) // 999 is a boundary + histogram.getPercentile(1.0) should shareABucketWith(1000) + } + + + test("merge") { + val context = new Context + import context._ + + for (i <- 0 until 50) { + histogram.add(i * 10) + histogram2.add(i * 10) + } + val origTotal = histogram.count + histogram.merge(histogram2) + assert(histogram.count === origTotal + histogram2.count) + val stats = histogram.get(true) + val stats2 = histogram2.get(true) + for (i <- 0 until 50) { + val bucket = Histogram.bucketIndex(i * 10) + assert(stats(bucket) === 2 * stats2(bucket)) + } + } + + test("clone") { + val context = new Context + import context._ + + for (i <- 0 until 50) { + histogram.add(i * 10) + } + val histClone = histogram.clone() + assert(histogram.buckets.toList === histClone.buckets.toList) + assert(histClone.buckets.toList === histogram.buckets.toList) + assert(histogram.count === histClone.count) + } + + test("handle a very large timing") { + val context = new Context + import context._ + + histogram.add(Int.MaxValue) + assert(histogram.getPercentile(0.0) === Int.MaxValue) + assert(histogram.getPercentile(0.1) === Int.MaxValue) + assert(histogram.getPercentile(0.9) === Int.MaxValue) + assert(histogram.getPercentile(1.0) === Int.MaxValue) + } + + test("handle an empty histogram") { + val 
context = new Context + import context._ + + assert(histogram.getPercentile(0.0) === 0) + assert(histogram.getPercentile(0.1) === 0) + assert(histogram.getPercentile(0.9) === 0) + assert(histogram.getPercentile(1.0) === 0) + } + + test("track count and sum") { + val context = new Context + import context._ + + histogram.add(10) + histogram.add(15) + histogram.add(20) + histogram.add(20) + assert(histogram.count === 4) + assert(histogram.sum === 65) + } + + test("getPercentile") { + val context = new Context + import context._ + + histogram.add(95) + // bucket covers [91, 99], midpoint is 95 + assert(histogram.getPercentile(0.0) === 95) + assert(histogram.getPercentile(0.5) === 95) + assert(histogram.getPercentile(1.0) === 95) + } + + test("getPercentile with no values") { + val context = new Context + import context._ + + assert(histogram.getPercentile(0.0) === 0) + assert(histogram.getPercentile(0.5) === 0) + assert(histogram.getPercentile(1.0) === 0) + } + + test("getPercentile with infinity") { + val context = new Context + import context._ + + histogram.add(Int.MaxValue) + assert(histogram.getPercentile(0.5) === Int.MaxValue) + } + + test("minimum") { + val context = new Context + import context._ + + histogram.add(95) + assert(histogram.minimum === 95) + } + + test("minimum with no values") { + val context = new Context + import context._ + + assert(histogram.minimum === 0) + } + + test("minimum with infinity") { + val context = new Context + import context._ + + histogram.add(Int.MaxValue) + assert(histogram.minimum === Int.MaxValue) + } + + test("maximum") { + val context = new Context + import context._ + + histogram.add(95) + assert(histogram.maximum === 95) + } + + test("maximum with no values") { + val context = new Context + import context._ + + assert(histogram.maximum === 0) + } + + test("maximum with infinity") { + val context = new Context + import context._ + + histogram.add(Int.MaxValue) + assert(histogram.maximum === Int.MaxValue) + } + + test("equals") { + val context = new Context + import context._ + + assert(histogram === histogram2) + histogram.add(10) + assert(histogram !== histogram2) + histogram2.add(10) + assert(histogram === histogram2) + histogram.add(5) + histogram.add(10) + histogram2.add(15) + assert(histogram !== histogram2) + } + + test("integer overflow shouldn't happen") { + val context = new Context + import context._ + + // This is equivalent of what's commented out below + val last = histogram.buckets.size - 1 + histogram.buckets(last) = Int.MaxValue + histogram.buckets(last - 1) = Int.MaxValue + histogram.count += 2L * Int.MaxValue + + // val n = Int.MaxValue + // val x = Histogram.buckets.last + // (1 to n) foreach { _ => + // histogram.add(x) + // histogram.add(x - 1) + // } + + assert(histogram.getPercentile(0.1) > 0) + } + + test("Subtracting two histograms must never have negative count") { + val context = new Context + import context._ + + histogram.add(1) + histogram2.add(1) + histogram2.add(10) + + val h = (histogram - histogram2) + assert(h.count === 0L) + assert(h.getPercentile(0.9999) === 0) + } + + test("Subtracting two histograms must work") { + val context = new Context + import context._ + + val n = 10 + (1 to 2*n) foreach { i => histogram.add(i) } + (1 to n) foreach { i => histogram2.add(i) } + val histogram3 = new Histogram + (n+1 to 2*n) foreach { i => histogram3.add(i) } + + assert((histogram - histogram2) === histogram3) + assert((histogram2 - histogram) === (new Histogram)) + } + +} diff --git
a/src/test/scala/com/twitter/ostrich/stats/JsonStatsFetcherSpec.scala b/src/test/scala/com/twitter/ostrich/stats/JsonStatsFetcherSpec.scala deleted file mode 100644 index 14e81bae..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/JsonStatsFetcherSpec.scala +++ /dev/null @@ -1,57 +0,0 @@ -package com.twitter.ostrich.stats - -import scala.io.Source -import com.twitter.io.TempFile -import com.twitter.ostrich.admin._ -import org.specs.SpecificationWithJUnit -import org.specs.util.TimeConversions._ - -/* - - Currently disabled because it's probably a bad idea to test - a ruby script this way. (Eg. it doesn't work with Travis-CI). - -class JsonStatsFetcherSpec extends SpecificationWithJUnit { - def exec(args: String*) = Runtime.getRuntime.exec(args.toArray) - - val hasRuby = try { - exec("ruby", "--version") - true - } catch { - case e: Throwable => false - } - - if (hasRuby) { - "json_stats_fetcher.rb" should { - var service: AdminHttpService = null - val script = TempFile.fromResourcePath("/json_stats_fetcher.rb").getAbsolutePath - - doBefore { - exec("chmod", "+x", script) - Stats.clearAll() - StatsListener.clearAll() - service = new AdminHttpService(0, 20, Stats, new RuntimeEnvironment(getClass)) - service.start() - } - - doAfter { - service.shutdown() - Stats.clearAll() - } - - def getStats = { - val process = exec(script, "-w", "-t", "1", "-p", service.address.getPort.toString, "-n") - process.waitFor() - Source.fromInputStream(process.getInputStream).mkString.split("\n") - } - - "fetch a stat" in { - Stats.incr("bugs") - getStats must contain("bugs=1") - Stats.incr("bugs", 37) - getStats must contain("bugs=37").eventually(3, 500.milliseconds) - } - } - } -} -*/ diff --git a/src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerSpec.scala b/src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerSpec.scala deleted file mode 100644 index 263ef023..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerSpec.scala +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2010 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich -package stats - -import scala.collection.immutable -import com.twitter.conversions.string._ -import com.twitter.conversions.time._ -import com.twitter.logging.{BareFormatter, Level, Logger, StringHandler} -import com.twitter.util.Time -import org.specs.SpecificationWithJUnit - -class JsonStatsLoggerSpec extends SpecificationWithJUnit { - "JsonStatsLogger" should { - val logger = Logger.get("stats") - - var handler: StringHandler = null - var collection: StatsCollection = null - var statsLogger: JsonStatsLogger = null - - def getLines() = { - handler.get.split("\n").toList.filter { s => s.startsWith("#Fields") || !s.startsWith("#") } - } - - doBefore { - handler = new StringHandler(BareFormatter, None) - logger.addHandler(handler) - logger.setUseParentHandlers(false) - logger.setLevel(Level.INFO) - handler.clear() - collection = new StatsCollection() - statsLogger = new JsonStatsLogger(logger, 1.second, None, collection) - } - - "log basic stats" in { - collection.incr("cats") - collection.incr("dogs", 3) - statsLogger.periodic() - val line = getLines()(0) - line mustMatch "\"cats\":1" - line mustMatch "\"dogs\":3" - } - - "log timings" in { - Time.withCurrentTimeFrozen { time => - collection.time("zzz") { time advance 10.milliseconds } - collection.time("zzz") { time advance 20.milliseconds } - statsLogger.periodic() - val line = getLines()(0) - line mustMatch "\"zzz_msec_count\":2" - line mustMatch "\"zzz_msec_average\":15" - line mustMatch "\"zzz_msec_p50\":10" - } - } - - "log gauges as ints when appropriate" in { - collection.setGauge("horse", 3.5) - collection.setGauge("cow", 1234567890.0) - statsLogger.periodic() - val line = getLines()(0) - line mustMatch "\"horse\":3.5" - line mustMatch "\"cow\":1234567890" - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerTest.scala b/src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerTest.scala new file mode 100644 index 00000000..4427caef --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/JsonStatsLoggerTest.scala @@ -0,0 +1,89 @@ +/* + * Copyright 2010 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.twitter.ostrich.stats + +import com.twitter.conversions.string._ +import com.twitter.conversions.time._ +import com.twitter.logging.{BareFormatter, Level, Logger, StringHandler} +import com.twitter.util.Time +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.FunSuite + +@RunWith(classOf[JUnitRunner]) +class JsonStatsLoggerTest extends FunSuite { + + class Context { + val logger = Logger.get("stats") + + var handler: StringHandler = null + var collection: StatsCollection = null + var statsLogger: JsonStatsLogger = null + + def getLines() = { + handler.get.split("\n").toList.filter { s => s.startsWith("#Fields") || !s.startsWith("#") } + } + + handler = new StringHandler(BareFormatter, None) + logger.addHandler(handler) + logger.setUseParentHandlers(false) + logger.setLevel(Level.INFO) + handler.clear() + collection = new StatsCollection() + statsLogger = new JsonStatsLogger(logger, 1.second, None, collection) + } + + test("log basic stats") { + val context = new Context + import context._ + + collection.incr("cats") + collection.incr("dogs", 3) + statsLogger.periodic() + val line = getLines()(0) + assert(line.contains("\"cats\":1")) + assert(line.contains("\"dogs\":3")) + } + + test("log timings") { + val context = new Context + import context._ + + Time.withCurrentTimeFrozen { time => + collection.time("zzz") { time advance 10.milliseconds } + collection.time("zzz") { time advance 20.milliseconds } + statsLogger.periodic() + val line = getLines()(0) + assert(line.contains("\"zzz_msec_count\":2")) + assert(line.contains("\"zzz_msec_average\":15")) + assert(line.contains("\"zzz_msec_p50\":10")) + } + } + + test("log gauges as ints when appropriate") { + val context = new Context + import context._ + + collection.setGauge("horse", 3.5) + collection.setGauge("cow", 1234567890.0) + statsLogger.periodic() + val line = getLines()(0) + assert(line.contains("\"horse\":3.5")) + assert(line.contains("\"cow\":1234567890")) + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionSpec.scala b/src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionSpec.scala deleted file mode 100644 index d357b1db..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionSpec.scala +++ /dev/null @@ -1,38 +0,0 @@ -package com.twitter.ostrich.stats - -import com.twitter.util.Future -import org.specs.SpecificationWithJUnit - -class LocalStatsCollectionSpec extends SpecificationWithJUnit { - val jobClassName = "rooster.TestCapturer" - - "LocalStatsCollection" should { - val localStats = LocalStatsCollection(jobClassName) - - doAfter { - Stats.clearAll() - } - - "writes to global stats at the same time" in { - localStats.addMetric("whateva", 5) - localStats.addMetric("whateva", 15) - Stats.getMetric("whateva")() mustEqual Distribution(Histogram(5, 15)) - localStats.getMetric("whateva")() mustEqual Distribution(Histogram(5, 15)) - } - - "flush" in { - localStats.incr("tflock") - localStats.incr("tflock") - localStats.getCounter("tflock")() mustEqual 2 - localStats.addMetric("timing", 900) - localStats.getMetric("timing")() mustEqual Distribution(Histogram(900)) - - localStats.flushInto(Stats) - - Stats.getCounter(jobClassName + ".tflock")() mustEqual 2 - Stats.getMetric(jobClassName + ".timing")() mustEqual Distribution(Histogram(900)) - localStats.getCounter("tflock")() mustEqual 0 - localStats.getMetric("timing")() mustEqual Distribution(Histogram()) - } - } -} diff --git 
a/src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionTest.scala b/src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionTest.scala new file mode 100644 index 00000000..f0fa2bb1 --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/LocalStatsCollectionTest.scala @@ -0,0 +1,48 @@ +package com.twitter.ostrich.stats + +import com.twitter.util.Future +import org.junit.runner.RunWith +import org.scalatest.{BeforeAndAfter, FunSuite} +import org.scalatest.junit.JUnitRunner + +@RunWith(classOf[JUnitRunner]) +class LocalStatsCollectionTest extends FunSuite with BeforeAndAfter { + + class Context { + val jobClassName = "rooster.TestCapturer" + val localStats = LocalStatsCollection(jobClassName) + } + + after { + Stats.clearAll() + } + + test("writes to global stats at the same time") { + val context = new Context + import context._ + + localStats.addMetric("whateva", 5) + localStats.addMetric("whateva", 15) + assert(Stats.getMetric("whateva")() === Distribution(Histogram(5, 15))) + assert(localStats.getMetric("whateva")() === Distribution(Histogram(5, 15))) + } + + test("flush") { + val context = new Context + import context._ + + localStats.incr("tflock") + localStats.incr("tflock") + assert(localStats.getCounter("tflock")() === 2) + localStats.addMetric("timing", 900) + assert(localStats.getMetric("timing")() === Distribution(Histogram(900))) + + localStats.flushInto(Stats) + + assert(Stats.getCounter(jobClassName + ".tflock")() === 2) + assert(Stats.getMetric(jobClassName + ".timing")() === Distribution(Histogram(900))) + assert(localStats.getCounter("tflock")() === 0) + assert(localStats.getMetric("timing")() === Distribution(Histogram())) + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/MetricSpec.scala b/src/test/scala/com/twitter/ostrich/stats/MetricSpec.scala deleted file mode 100644 index 3c0aa82e..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/MetricSpec.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2011 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich.stats - -import org.specs.SpecificationWithJUnit - -class MetricSpec extends SpecificationWithJUnit { - "Metric" should { - "min, max, mean" in { - val metric = new Metric() - metric.add(10) - metric.add(20) - metric() mustEqual Distribution(Histogram(10, 20)) - metric.add(60) - metric() mustEqual Distribution(Histogram(10, 20, 60)) - - metric().histogram.get(false) mustEqual Histogram(10, 20, 60).get(false) - } - - "add distribution" in { - val metric = new Metric() - metric.add(Distribution(Histogram(10, 20))) - metric.add(60) - metric() mustEqual Distribution(Histogram(10, 20, 60)) - } - - "clear" in { - val metric = new Metric() - metric.add(10) - metric.add(20) - metric() mustEqual Distribution(Histogram(10, 20)) - metric() mustEqual Distribution(Histogram(10, 20)) - metric.clear() - metric() mustEqual Distribution(Histogram()) - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/MetricTest.scala b/src/test/scala/com/twitter/ostrich/stats/MetricTest.scala new file mode 100644 index 00000000..c35aeb95 --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/MetricTest.scala @@ -0,0 +1,54 @@ +/* + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.stats + +import org.junit.runner.RunWith +import org.scalatest.FunSuite +import org.scalatest.junit.JUnitRunner + +@RunWith(classOf[JUnitRunner]) +class MetricTest extends FunSuite { + + test("min, max, mean") { + val metric = new Metric() + metric.add(10) + metric.add(20) + assert(metric() === Distribution(Histogram(10, 20))) + metric.add(60) + assert(metric() === Distribution(Histogram(10, 20, 60))) + + assert(metric().histogram.get(false) === Histogram(10, 20, 60).get(false)) + } + + test("add distribution") { + val metric = new Metric() + metric.add(Distribution(Histogram(10, 20))) + metric.add(60) + assert(metric() === Distribution(Histogram(10, 20, 60))) + } + + test("clear") { + val metric = new Metric() + metric.add(10) + metric.add(20) + assert(metric() === Distribution(Histogram(10, 20))) + assert(metric() === Distribution(Histogram(10, 20))) + metric.clear() + assert(metric() === Distribution(Histogram())) + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/StatsCollectionSpec.scala b/src/test/scala/com/twitter/ostrich/stats/StatsCollectionSpec.scala deleted file mode 100644 index 5c8ef510..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/StatsCollectionSpec.scala +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.twitter.ostrich -package stats - -import scala.collection.{immutable, mutable} -import com.twitter.conversions.string._ -import com.twitter.conversions.time._ -import com.twitter.logging.{Level, Logger} -import com.twitter.util.{Time, Future} -import org.specs.SpecificationWithJUnit - -class StatsCollectionSpec extends SpecificationWithJUnit { - "StatsCollection" should { - val collection = new StatsCollection() - - doBefore { - Logger.get("").setLevel(Level.OFF) - } - - "fillInJvmGauges" in { - val map = new mutable.HashMap[String, Double] - collection.fillInJvmGauges(map) - map.keys.toList must contain("jvm_num_cpus") - map.keys.toList must contain("jvm_heap_used") - map.keys.toList must contain("jvm_start_time") - map.keys.toList must contain("jvm_post_gc_used") - } - - "fillInJvmCounters" in { - val map = new mutable.HashMap[String, Long] - collection.fillInJvmCounters(map) - map.keys.toList must contain("jvm_gc_cycles") - map.keys.toList must contain("jvm_gc_msec") - } - - "StatsSummary filtering" in { - val summary = StatsSummary( - Map("apples" -> 10, "oranges" -> 13, "appliances" -> 4, "bad_oranges" -> 1), - Map(), - Map(), - Map() - ) - - summary.filterOut("""app.*""".r).counters mustEqual Map("oranges" -> 13, "bad_oranges" -> 1) - summary.filterOut("""xyz.*""".r).counters mustEqual summary.counters - summary.filterOut(""".*oranges""".r).counters mustEqual Map("apples" -> 10, "appliances" -> 4) - } - - "counters" in { - "basic" in { - collection.incr("widgets", 1) - collection.incr("wodgets", 12) - collection.incr("wodgets") - collection.getCounters() mustEqual Map("widgets" -> 1, "wodgets" -> 13) - } - - "negative" in { - collection.incr("widgets", 3) - collection.incr("widgets", -1) - collection.getCounters() mustEqual Map("widgets" -> 2) - } - - "clearCounter" in { - collection.getCounter("smellyfeet") - collection.incr("smellyfeet", 1) - collection.getCounters() mustEqual Map("smellyfeet" -> 1) - collection.removeCounter("smellyfeet") - collection.getCounters() mustEqual Map() - } - } - - "metrics" in { - "empty" in { - collection.addMetric("test", 0) - val test = collection.getMetric("test") - test() mustEqual new Distribution(Histogram(0)) - test() mustEqual new Distribution(Histogram(0)) - // the timings list will be empty here: - test.clear() - test() mustEqual new Distribution(Histogram()) - } - - "basic min/max/average" in { - collection.addMetric("test", 1) - collection.addMetric("test", 2) - collection.addMetric("test", 3) - val test = collection.getMetric("test") - test() mustEqual new Distribution(Histogram(1, 2, 3)) - } - - "report" in { - var x = 0 - collection.time("hundred") { Thread.sleep(10) } - val timings = collection.getMetrics() - timings.keys.toList mustEqual List("hundred_msec") - timings("hundred_msec").count mustEqual 1 - timings("hundred_msec").minimum must be_>(0) - timings("hundred_msec").maximum must be_>(0) - } - - "time future" in { - val future = Future({ Thread.sleep(10); 100 }) - - collection.timeFutureMillis("latency")(future)() mustEqual 100 - - val timings = collection.getMetrics() - timings("latency_msec").count mustEqual 1 - timings("latency_msec").minimum must be_>(0) - timings("latency_msec").minimum must be_>(0) - } - - "average of 0" in { - collection.addMetric("test", 0) - val test = collection.getMetric("test") - test() mustEqual new Distribution(Histogram(0)) - } - - "ignore negative timings" in { - 
collection.addMetric("test", 1) - collection.addMetric("test", -1) - collection.addMetric("test", Int.MinValue) - val test = collection.getMetric("test") - test() mustEqual new Distribution(Histogram(1)) - } - - "boundary timing sizes" in { - collection.addMetric("test", Int.MaxValue) - collection.addMetric("test", 5) - val sum = 5 + Int.MaxValue - val avg = sum / 2.0 - val test = collection.getMetric("test") - test() mustEqual - new Distribution(Histogram(5, Int.MaxValue)) - } - - "handle code blocks" in { - Time.withCurrentTimeFrozen { time => - collection.time("test") { - time.advance(10.millis) - } - val test = collection.getMetric("test_msec") - test().average must be_>=(10.0) - } - } - - "reset when asked" in { - var x = 0 - collection.time("hundred") { for (i <- 0 until 100) x += i } - collection.getMetric("hundred_msec")().count mustEqual 1 - collection.time("hundred") { for (i <- 0 until 100) x += i } - collection.getMetric("hundred_msec")().count mustEqual 2 - collection.getMetric("hundred_msec").clear() - collection.time("hundred") { for (i <- 0 until 100) x += i } - collection.getMetric("hundred_msec")().count mustEqual 1 - } - - "add bundle of timings at once" in { - val timingStat = new Distribution(Histogram(10, 15, 20)) - collection.addMetric("test", timingStat) - collection.addMetric("test", 25) - collection.getMetric("test")() mustEqual Distribution(Histogram(10, 15, 20, 25)) - } - - "add multiple bundles of timings" in { - val timingStat1 = new Distribution(Histogram(15, 25)) - val timingStat2 = new Distribution(Histogram(10, 20, 25)) - collection.addMetric("test", timingStat1) - collection.addMetric("test", timingStat2) - collection.getMetric("test")() mustEqual Distribution(Histogram(10, 15, 20, 25, 25)) - } - - "timing stats can be added and reflected in Stats.getMetrics" in { - Stats.addMetric("foobar", new Distribution(Histogram(10))) - Stats.getMetrics()("foobar").count mustEqual 1 - Stats.addMetric("foobar", new Distribution(Histogram(20, 30))) - Stats.getMetrics()("foobar").count mustEqual 3 - } - - "report text in sorted order" in { - Stats.addMetric("alpha", new Distribution(Histogram(0))) - Stats.getMetrics()("alpha").toString mustEqual - "(average=0, count=1, maximum=0, minimum=0, " + - "p50=0, p90=0, p95=0, p99=0, p999=0, p9999=0, sum=0)" - } - } - - "gauges" in { - val collection = new StatsCollection() - - "report" in { - collection.addGauge("pi") { java.lang.Math.PI } - collection.getGauges() mustEqual Map("pi" -> java.lang.Math.PI) - } - - "setGauge" in { - collection.setGauge("stew", 11.0) - collection.getGauge("stew") mustEqual Some(11.0) - } - - "getGauge" in { - collection.setGauge("stew", 11.0) - collection.getGauges() mustEqual Map("stew" -> 11.0) - } - - "swallow exceptions" in { - collection.addGauge("YIKES") { throw new RuntimeException("YIKES") } - collection.getGauges() mustEqual Map.empty[String, Double] - collection.getGauge("YIKES") mustEqual None - } - - "clearGauge" in { - collection.setGauge("stew", 11.0) - collection.clearGauge("stew") - collection.getGauges() mustEqual Map() - } - - "update" in { - var potatoes = 100.0 - // gauge that increments every time it's read: - collection.addGauge("stew") { potatoes += 1.0; potatoes } - collection.getGauges() mustEqual Map("stew" -> 101.0) - collection.getGauges() mustEqual Map("stew" -> 102.0) - collection.getGauges() mustEqual Map("stew" -> 103.0) - } - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/StatsCollectionTest.scala 
b/src/test/scala/com/twitter/ostrich/stats/StatsCollectionTest.scala new file mode 100644 index 00000000..95ee3b9c --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/StatsCollectionTest.scala @@ -0,0 +1,273 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.stats + +import com.twitter.conversions.string._ +import com.twitter.conversions.time._ +import com.twitter.logging.{Level, Logger} +import com.twitter.util.{Time, Future} +import org.junit.runner.RunWith +import org.scalatest.{BeforeAndAfter, FunSuite} +import org.scalatest.junit.JUnitRunner +import scala.collection.mutable + +@RunWith(classOf[JUnitRunner]) +class StatsCollectionTest extends FunSuite with BeforeAndAfter { + + class Context { + val collection = new StatsCollection() + } + + before { + Logger.get("").setLevel(Level.OFF) + } + + test("fillInJvmGauges") { + val context = new Context + import context._ + + val map = new mutable.HashMap[String, Double] + collection.fillInJvmGauges(map) + assert(map.keys.toList.contains("jvm_num_cpus")) + assert(map.keys.toList.contains("jvm_heap_used")) + assert(map.keys.toList.contains("jvm_start_time")) + assert(map.keys.toList.contains("jvm_post_gc_used")) + } + + test("fillInJvmCounters") { + val context = new Context + import context._ + + val map = new mutable.HashMap[String, Long] + collection.fillInJvmCounters(map) + assert(map.keys.toList.contains("jvm_gc_cycles")) + assert(map.keys.toList.contains("jvm_gc_msec")) + } + + test("StatsSummary filtering") { + val summary = StatsSummary( + Map("apples" -> 10, "oranges" -> 13, "appliances" -> 4, "bad_oranges" -> 1), + Map(), + Map(), + Map() + ) + + assert(summary.filterOut("""app.*""".r).counters === Map("oranges" -> 13, "bad_oranges" -> 1)) + assert(summary.filterOut("""xyz.*""".r).counters === summary.counters) + assert(summary.filterOut(""".*oranges""".r).counters === Map("apples" -> 10, "appliances" -> 4)) + } + + test("counters") { + new Context { + info("basic") + collection.incr("widgets", 1) + collection.incr("wodgets", 12) + collection.incr("wodgets") + assert(collection.getCounters() === Map("widgets" -> 1, "wodgets" -> 13)) + } + + new Context { + info("negative") + collection.incr("widgets", 3) + collection.incr("widgets", -1) + assert(collection.getCounters() === Map("widgets" -> 2)) + } + + new Context { + info("clearCounter") + collection.getCounter("smellyfeet") + collection.incr("smellyfeet", 1) + assert(collection.getCounters() === Map("smellyfeet" -> 1)) + collection.removeCounter("smellyfeet") + assert(collection.getCounters() === Map()) + } + } + + test("metrics") { + new Context { + info("empty") + collection.addMetric("test", 0) + val test = collection.getMetric("test") + assert(test() === new Distribution(Histogram(0))) + assert(test() === new Distribution(Histogram(0))) + // the timings list will be empty here: + test.clear() + assert(test() === new Distribution(Histogram())) + } + + new Context { + info("basic 
min/max/average") + collection.addMetric("test", 1) + collection.addMetric("test", 2) + collection.addMetric("test", 3) + val test = collection.getMetric("test") + assert(test() === new Distribution(Histogram(1, 2, 3))) + } + + new Context { + info("report") + var x = 0 + collection.time("hundred") { Thread.sleep(10) } + val timings = collection.getMetrics() + assert(timings.keys.toList === List("hundred_msec")) + assert(timings("hundred_msec").count === 1) + assert(timings("hundred_msec").minimum > 0) + assert(timings("hundred_msec").maximum > 0) + } + + new Context { + info("time future") + val future = Future({ Thread.sleep(10); 100 }) + + assert(collection.timeFutureMillis("latency")(future)() === 100) + + val timings = collection.getMetrics() + assert(timings("latency_msec").count === 1) + assert(timings("latency_msec").minimum > 0) + assert(timings("latency_msec").minimum > 0) + } + + new Context { + info("average of 0") + collection.addMetric("test", 0) + val test = collection.getMetric("test") + assert(test() === new Distribution(Histogram(0))) + } + + new Context { + info("ignore negative timings") + collection.addMetric("test", 1) + collection.addMetric("test", -1) + collection.addMetric("test", Int.MinValue) + val test = collection.getMetric("test") + assert(test() === new Distribution(Histogram(1))) + } + + new Context { + info("boundary timing sizes") + collection.addMetric("test", Int.MaxValue) + collection.addMetric("test", 5) + val sum = 5 + Int.MaxValue + val avg = sum / 2.0 + val test = collection.getMetric("test") + assert(test() === + new Distribution(Histogram(5, Int.MaxValue))) + } + + new Context { + info("handle code blocks") + Time.withCurrentTimeFrozen { time => + collection.time("test") { + time.advance(10.millis) + } + val test = collection.getMetric("test_msec") + assert(test().average >= 10.0) + } + } + + new Context { + info("reset when asked") + var x = 0 + collection.time("hundred") { for (i <- 0 until 100) x += i } + assert(collection.getMetric("hundred_msec")().count === 1) + collection.time("hundred") { for (i <- 0 until 100) x += i } + assert(collection.getMetric("hundred_msec")().count === 2) + collection.getMetric("hundred_msec").clear() + collection.time("hundred") { for (i <- 0 until 100) x += i } + assert(collection.getMetric("hundred_msec")().count === 1) + } + + new Context { + info("add bundle of timings at once") + val timingStat = new Distribution(Histogram(10, 15, 20)) + collection.addMetric("test", timingStat) + collection.addMetric("test", 25) + assert(collection.getMetric("test")() === Distribution(Histogram(10, 15, 20, 25))) + } + + new Context { + info("add multiple bundles of timings") + val timingStat1 = new Distribution(Histogram(15, 25)) + val timingStat2 = new Distribution(Histogram(10, 20, 25)) + collection.addMetric("test", timingStat1) + collection.addMetric("test", timingStat2) + assert(collection.getMetric("test")() === Distribution(Histogram(10, 15, 20, 25, 25))) + } + + new Context { + info("timing stats can be added and reflected in Stats.getMetrics") + Stats.addMetric("foobar", new Distribution(Histogram(10))) + assert(Stats.getMetrics()("foobar").count === 1) + Stats.addMetric("foobar", new Distribution(Histogram(20, 30))) + assert(Stats.getMetrics()("foobar").count === 3) + } + + new Context { + info("report text in sorted order") + Stats.addMetric("alpha", new Distribution(Histogram(0))) + assert(Stats.getMetrics()("alpha").toString === + "(average=0, count=1, maximum=0, minimum=0, " + + "p50=0, p90=0, p95=0, p99=0, 
p999=0, p9999=0, sum=0)") + } + } + + test("gauges") { + + new Context { + info("report") + collection.addGauge("pi") { java.lang.Math.PI } + assert(collection.getGauges() === Map("pi" -> java.lang.Math.PI)) + } + + new Context { + info("setGauge") + collection.setGauge("stew", 11.0) + assert(collection.getGauge("stew") === Some(11.0)) + } + + new Context { + info("getGauge") + collection.setGauge("stew", 11.0) + assert(collection.getGauges() === Map("stew" -> 11.0)) + } + + new Context { + info("swallow exceptions") + collection.addGauge("YIKES") { throw new RuntimeException("YIKES") } + assert(collection.getGauges() === Map.empty[String, Double]) + assert(collection.getGauge("YIKES") === None) + } + + new Context { + info("clearGauge") + collection.setGauge("stew", 11.0) + collection.clearGauge("stew") + assert(collection.getGauges() === Map()) + } + + new Context { + info("update") + var potatoes = 100.0 + // gauge that increments every time it's read: + collection.addGauge("stew") { potatoes += 1.0; potatoes } + assert(collection.getGauges() === Map("stew" -> 101.0)) + assert(collection.getGauges() === Map("stew" -> 102.0)) + assert(collection.getGauges() === Map("stew" -> 103.0)) + } + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/StatsListenerSpec.scala b/src/test/scala/com/twitter/ostrich/stats/StatsListenerSpec.scala deleted file mode 100644 index bc40fbc1..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/StatsListenerSpec.scala +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright 2011 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich.stats - -import com.twitter.conversions.time._ -import com.twitter.ostrich.admin.PeriodicBackgroundProcess -import org.specs.SpecificationWithJUnit -import org.specs.util.Duration - -class StatsListenerSpec extends SpecificationWithJUnit { - "StatsListener object" should { - var collection: StatsCollection = null - - doBefore { - collection = new StatsCollection() - StatsListener.clearAll() - } - - "track latched listeners" in { - StatsListener.listeners.size() mustEqual 0 - val listener = StatsListener(1.minute, collection) - val listener2 = StatsListener(1.minute, collection) - listener must be(listener2) - StatsListener.listeners.size() mustEqual 1 - StatsListener(500.millis, collection) mustNot be(listener) - StatsListener.listeners.size() mustEqual 2 - val key = ("period:%d".format(1.minute.inMillis), collection) - StatsListener.listeners.containsKey(key) must beTrue - StatsListener.listeners.get(key) mustEqual listener - } - - "tracks named listeners" in { - val monkeyListener = StatsListener("monkey", collection) - StatsListener("donkey", collection) mustNot be(monkeyListener) - StatsListener("monkey", collection) must be(monkeyListener) - } - } - - "StatsListener instance" should { - var collection: StatsCollection = null - var listener: StatsListener = null - var listener2: StatsListener = null - - doBefore { - collection = new StatsCollection() - listener = new StatsListener(collection) - listener2 = new StatsListener(collection) - StatsListener.clearAll() - } - - - "reports basic stats" in { - "counters" in { - collection.incr("b", 4) - collection.incr("a", 3) - - listener.getCounters() mustEqual Map("a" -> 3, "b" -> 4) - collection.incr("a", 2) - listener.getCounters() mustEqual Map("a" -> 2, "b" -> 0) - } - - "metrics" in { - collection.addMetric("beans", 3) - collection.addMetric("beans", 4) - collection.getMetrics() mustEqual Map("beans" -> Distribution(Histogram(3, 4))) - listener.getMetrics() mustEqual Map("beans" -> Histogram(3, 4)) - listener2.getMetrics() mustEqual Map("beans" -> Histogram(3, 4)) - } - } - - "independently tracks deltas" in { - "counters" in { - collection.incr("a", 3) - listener.getCounters() mustEqual Map("a" -> 3) - collection.incr("a", 5) - listener2.getCounters() mustEqual Map("a" -> 8) - collection.incr("a", 1) - listener.getCounters() mustEqual Map("a" -> 6) - } - - "metrics" in { - collection.addMetric("timing", 10) - collection.addMetric("timing", 20) - listener.getMetrics() mustEqual Map("timing" -> Histogram(10, 20)) - collection.addMetric("timing", 10) - listener2.getMetrics() mustEqual Map("timing" -> Histogram(10, 20, 10)) - collection.addMetric("timing", 10) - listener.getMetrics() mustEqual Map("timing" -> Histogram(10, 10)) - listener2.getMetrics() mustEqual Map("timing" -> Histogram(10)) - - listener.getMetrics() mustEqual Map("timing" -> Histogram()) - listener2.getMetrics() mustEqual Map("timing" -> Histogram()) - } - } - - "master stats always increase, even with listeners connected" in { - "counters" in { - collection.incr("a", 3) - listener.getCounters() mustEqual Map("a" -> 3) - collection.incr("a", 5) - listener.getCounters() mustEqual Map("a" -> 5) - - collection.getCounters() mustEqual Map("a" -> 8) - } - - "metrics" in { - collection.addMetric("timing", 10) - collection.addMetric("timing", 20) - listener.getMetrics() mustEqual Map("timing" -> Histogram(10, 20)) - collection.addMetric("timing", 10) - - collection.getMetrics() mustEqual Map("timing" -> Distribution(Histogram(10, 20, 10))) - } 
- } - - "tracks stats only from the point a listener was attached, but report all keys" in { - collection.incr("a", 5) - collection.incr("b", 5) - collection.addMetric("beans", 5) - collection.addMetric("rice", 5) - val listener3 = new StatsListener(collection) - collection.incr("a", 70) - collection.incr("a", 300) - collection.addMetric("beans", 3) - listener3.getCounters() mustEqual Map("a" -> 370, "b" -> 0) - listener3.getMetrics() mustEqual - Map("beans" -> Histogram(3), - "rice" -> Histogram()) - } - } - - "LatchedStatsListener instance" should { - "latch to the top of a period" in { - val collection = new StatsCollection() - val listener = new LatchedStatsListener(collection, 1.second) { - override lazy val service = new PeriodicBackgroundProcess("", 1.second) { - def periodic() { } - } - } - - var gauge = 0 - collection.incr("counter", 5) - collection.addGauge("gauge") { synchronized { gauge } } - collection.setLabel("label", "HIMYNAMEISBRAK") - collection.addMetric("metric", Distribution(Histogram(1, 2))) - - listener.getCounters() mustEqual Map() - listener.getGauges() mustEqual Map() - listener.getLabels() mustEqual Map() - listener.getMetrics() mustEqual Map() - - listener.nextLatch() - - listener.getCounters() mustEqual Map("counter" -> 5) - listener.getGauges() mustEqual Map("gauge" -> 0) - listener.getLabels() mustEqual Map("label" -> "HIMYNAMEISBRAK") - listener.getMetrics() mustEqual Map("metric" -> Histogram(1, 2)) - - collection.incr("counter", 3) - synchronized { gauge = 37 } - collection.setLabel("label", "EEPEEPIAMAMONKEY") - collection.addMetric("metric", Distribution(Histogram(3, 4, 5))) - - listener.getCounters() mustEqual Map("counter" -> 5) - listener.getGauges() mustEqual Map("gauge" -> 0) - listener.getLabels() mustEqual Map("label" -> "HIMYNAMEISBRAK") - listener.getMetrics() mustEqual Map("metric" -> Histogram(1, 2)) - - listener.nextLatch() - - listener.getCounters() mustEqual Map("counter" -> 3) - listener.getGauges() mustEqual Map("gauge" -> 37) - listener.getLabels() mustEqual Map("label" -> "EEPEEPIAMAMONKEY") - listener.getMetrics() mustEqual Map("metric" -> Histogram(3, 4, 5)) - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/StatsListenerTest.scala b/src/test/scala/com/twitter/ostrich/stats/StatsListenerTest.scala new file mode 100644 index 00000000..00c899bc --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/StatsListenerTest.scala @@ -0,0 +1,196 @@ +/* + * Copyright 2011 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.twitter.ostrich.stats + +import com.twitter.conversions.time._ +import com.twitter.ostrich.admin.PeriodicBackgroundProcess +import com.twitter.util.Future +import org.junit.runner.RunWith +import org.scalatest.FunSuite +import org.scalatest.junit.JUnitRunner + +@RunWith(classOf[JUnitRunner]) +class StatsListenerTest extends FunSuite { + + class StatsListenerObjectContext { + var collection: StatsCollection = new StatsCollection() + StatsListener.clearAll() + } + + test("tracks latched listeners") { + val context = new StatsListenerObjectContext + import context._ + + assert(StatsListener.listeners.size() === 0) + val listener = StatsListener(1.minute, collection) + val listener2 = StatsListener(1.minute, collection) + assert(listener === listener2) + assert(StatsListener.listeners.size() === 1) + assert(StatsListener(500.millis, collection) !== listener) + assert(StatsListener.listeners.size() === 2) + val key = ("period:%d".format(1.minute.inMillis), collection) + assert(StatsListener.listeners.containsKey(key)) + assert(StatsListener.listeners.get(key) === listener) + } + + test("tracks named listeners") { + val context = new StatsListenerObjectContext + import context._ + + val monkeyListener = StatsListener("monkey", collection) + assert(StatsListener("donkey", collection) !== monkeyListener) + assert(StatsListener("monkey", collection) === monkeyListener) + } + + class StatsListenerInstanceContext { + var collection: StatsCollection = new StatsCollection() + var listener: StatsListener = new StatsListener(collection) + var listener2: StatsListener = new StatsListener(collection) + StatsListener.clearAll() + } + + test("reports basic stats") { + val context = new StatsListenerInstanceContext + import context._ + + info("counters") + collection.incr("b", 4) + collection.incr("a", 3) + + assert(listener.getCounters() === Map("a" -> 3, "b" -> 4)) + collection.incr("a", 2) + assert(listener.getCounters() === Map("a" -> 2, "b" -> 0)) + + info("metrics") + collection.addMetric("beans", 3) + collection.addMetric("beans", 4) + assert(collection.getMetrics() === Map("beans" -> Distribution(Histogram(3, 4)))) + assert(listener.getMetrics() === Map("beans" -> Histogram(3, 4))) + assert(listener2.getMetrics() === Map("beans" -> Histogram(3, 4))) + } + + test("independently tracks deltas") { + val context = new StatsListenerInstanceContext + import context._ + + info("counters") + collection.incr("a", 3) + assert(listener.getCounters() === Map("a" -> 3)) + collection.incr("a", 5) + assert(listener2.getCounters() === Map("a" -> 8)) + collection.incr("a", 1) + assert(listener.getCounters() === Map("a" -> 6)) + + info("metrics") + collection.addMetric("timing", 10) + collection.addMetric("timing", 20) + assert(listener.getMetrics() === Map("timing" -> Histogram(10, 20))) + collection.addMetric("timing", 10) + assert(listener2.getMetrics() === Map("timing" -> Histogram(10, 20, 10))) + collection.addMetric("timing", 10) + assert(listener.getMetrics() === Map("timing" -> Histogram(10, 10))) + assert(listener2.getMetrics() === Map("timing" -> Histogram(10))) + + assert(listener.getMetrics() === Map("timing" -> Histogram())) + assert(listener2.getMetrics() === Map("timing" -> Histogram())) + } + + test("master stats always increase, even with listeners connected") { + val context = new StatsListenerInstanceContext + import context._ + + info("counters") + collection.incr("a", 3) + assert(listener.getCounters() === Map("a" -> 3)) + collection.incr("a", 5) + assert(listener.getCounters() 
=== Map("a" -> 5)) + + assert(collection.getCounters() === Map("a" -> 8)) + + info("metrics") + collection.addMetric("timing", 10) + collection.addMetric("timing", 20) + assert(listener.getMetrics() === Map("timing" -> Histogram(10, 20))) + collection.addMetric("timing", 10) + + assert(collection.getMetrics() === Map("timing" -> Distribution(Histogram(10, 20, 10)))) + } + + + test("tracks stats only from the point a listener was attached, but report all keys") { + val context = new StatsListenerInstanceContext + import context._ + + collection.incr("a", 5) + collection.incr("b", 5) + collection.addMetric("beans", 5) + collection.addMetric("rice", 5) + val listener3 = new StatsListener(collection) + collection.incr("a", 70) + collection.incr("a", 300) + collection.addMetric("beans", 3) + assert(listener3.getCounters() === Map("a" -> 370, "b" -> 0)) + assert(listener3.getMetrics() === + Map("beans" -> Histogram(3), + "rice" -> Histogram())) + } + + test("latch to the top of a period") { + val collection = new StatsCollection() + val listener = new LatchedStatsListener(collection, 1.second) { + override lazy val service = new PeriodicBackgroundProcess("", 1.second) { + def periodic() { } + } + } + + var gauge = 0 + collection.incr("counter", 5) + collection.addGauge("gauge") { synchronized { gauge } } + collection.setLabel("label", "HIMYNAMEISBRAK") + collection.addMetric("metric", Distribution(Histogram(1, 2))) + + assert(listener.getCounters() === Map()) + assert(listener.getGauges() === Map()) + assert(listener.getLabels() === Map()) + assert(listener.getMetrics() === Map()) + + listener.nextLatch() + + assert(listener.getCounters() === Map("counter" -> 5)) + assert(listener.getGauges() === Map("gauge" -> 0)) + assert(listener.getLabels() === Map("label" -> "HIMYNAMEISBRAK")) + assert(listener.getMetrics() === Map("metric" -> Histogram(1, 2))) + + collection.incr("counter", 3) + synchronized { gauge = 37 } + collection.setLabel("label", "EEPEEPIAMAMONKEY") + collection.addMetric("metric", Distribution(Histogram(3, 4, 5))) + + assert(listener.getCounters() === Map("counter" -> 5)) + assert(listener.getGauges() === Map("gauge" -> 0)) + assert(listener.getLabels() === Map("label" -> "HIMYNAMEISBRAK")) + assert(listener.getMetrics() === Map("metric" -> Histogram(1, 2))) + + listener.nextLatch() + + assert(listener.getCounters() === Map("counter" -> 3)) + assert(listener.getGauges() === Map("gauge" -> 37)) + assert(listener.getLabels() === Map("label" -> "EEPEEPIAMAMONKEY")) + assert(listener.getMetrics() === Map("metric" -> Histogram(3, 4, 5))) + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/StatsSpec.scala b/src/test/scala/com/twitter/ostrich/stats/StatsTest.scala similarity index 60% rename from src/test/scala/com/twitter/ostrich/stats/StatsSpec.scala rename to src/test/scala/com/twitter/ostrich/stats/StatsTest.scala index 282410f3..277dcd30 100644 --- a/src/test/scala/com/twitter/ostrich/stats/StatsSpec.scala +++ b/src/test/scala/com/twitter/ostrich/stats/StatsTest.scala @@ -18,16 +18,19 @@ package com.twitter.ostrich.stats import com.twitter.conversions.time._ import com.twitter.util.Time -import org.specs.SpecificationWithJUnit +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.FunSuite -class StatsSpec extends SpecificationWithJUnit { - "Stats" should { - "delta" in { - Stats.delta(0, 5) mustEqual 5 - Stats.delta(Long.MaxValue - 10, Long.MaxValue) mustEqual 10 - Stats.delta(-4000, -3000) mustEqual 1000 - 
Stats.delta(Long.MaxValue, Long.MinValue) mustEqual 1 - Stats.delta(Long.MaxValue - 5, Long.MinValue + 3) mustEqual 9 - } +@RunWith(classOf[JUnitRunner]) +class StatsTest extends FunSuite { + + test("delta") { + assert(Stats.delta(0, 5) === 5) + assert(Stats.delta(Long.MaxValue - 10, Long.MaxValue) === 10) + assert(Stats.delta(-4000, -3000) === 1000) + assert(Stats.delta(Long.MaxValue, Long.MinValue) === 1) + assert(Stats.delta(Long.MaxValue - 5, Long.MinValue + 3) === 9) } + } diff --git a/src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerSpec.scala b/src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerSpec.scala deleted file mode 100644 index 8c9ecb98..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerSpec.scala +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.twitter.ostrich -package stats - -import scala.collection.immutable -import com.twitter.conversions.string._ -import com.twitter.conversions.time._ -import com.twitter.logging.{BareFormatter, Level, Logger, StringHandler} -import com.twitter.util.Time -import org.specs.SpecificationWithJUnit - -class W3CStatsLoggerSpec extends SpecificationWithJUnit { - "W3CStatsLogger" should { - val logger = Logger.get("w3c") - - var handler: StringHandler = null - var collection: StatsCollection = null - var statsLogger: W3CStatsLogger = null - - def getLines() = { - val rv = handler.get.split("\n").toList.filter { s => s.startsWith("#Fields") || !s.startsWith("#") } - handler.clear() - rv - } - - doBefore { - handler = new StringHandler(BareFormatter, None) - logger.addHandler(handler) - logger.setUseParentHandlers(false) - logger.setLevel(Level.INFO) - - collection = new StatsCollection() - handler.clear() - statsLogger = new W3CStatsLogger(logger, 1.second, collection) - } - - "log basic stats" in { - collection.incr("cats") - collection.incr("dogs", 3) - statsLogger.periodic() - getLines() mustEqual "#Fields: cats dogs" :: "948200938 1 3" :: Nil - } - - "log timings" in { - Time.withCurrentTimeFrozen { time => - collection.time("zzz") { time advance 10.milliseconds } - collection.time("zzz") { time advance 20.milliseconds } - statsLogger.periodic() - getLines() mustEqual List( - "#Fields: zzz_msec_average zzz_msec_count zzz_msec_maximum zzz_msec_minimum zzz_msec_sum", - "1176525931 15 2 19 10 30" - ) - } - } - - "log multiple lines" in { - Time.withCurrentTimeFrozen { time => - collection.incr("cats") - collection.incr("dogs", 3) - collection.time("zzz") { time advance 10.milliseconds } - statsLogger.periodic() - collection.incr("cats") - collection.time("zzz") { time advance 20.milliseconds } - statsLogger.periodic() - getLines() mustEqual List( - "#Fields: cats dogs zzz_msec_average zzz_msec_count zzz_msec_maximum zzz_msec_minimum zzz_msec_sum", - "2826312472 1 3 10 1 10 10 10", - "2826312472 1 0 20 1 19 19 20" - ) - } - } - - "not repeat the header too often" in { - Time.withCurrentTimeFrozen { time => - 
collection.incr("cats") - statsLogger.periodic() - getLines() mustEqual "#Fields: cats" :: "2001103910 1" :: Nil - collection.incr("cats") - statsLogger.periodic() - getLines() mustEqual "2001103910 1" :: Nil - time advance 10.minutes - collection.incr("cats") - statsLogger.periodic() - getLines() mustEqual "#Fields: cats" :: "2001103910 1" :: Nil - } - } - - "repeat the header when the fields change" in { - collection.incr("cats") - statsLogger.periodic() - getLines() mustEqual "#Fields: cats" :: "2001103910 1" :: Nil - collection.incr("cats") - statsLogger.periodic() - getLines() mustEqual "2001103910 1" :: Nil - collection.incr("cats") - collection.incr("dogs") - statsLogger.periodic() - getLines() mustEqual "#Fields: cats dogs" :: "948200938 1 1" :: Nil - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerTest.scala b/src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerTest.scala new file mode 100644 index 00000000..258d1891 --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/W3CStatsLoggerTest.scala @@ -0,0 +1,132 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.stats + +import com.twitter.conversions.string._ +import com.twitter.conversions.time._ +import com.twitter.logging.{BareFormatter, Level, Logger, StringHandler} +import com.twitter.util.Time +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.FunSuite + +@RunWith(classOf[JUnitRunner]) +class W3CStatsLoggerTest extends FunSuite { + + class Context { + val logger = Logger.get("w3c") + + var handler: StringHandler = null + var collection: StatsCollection = null + var statsLogger: W3CStatsLogger = null + + def getLines() = { + val rv = handler.get.split("\n").toList.filter { s => s.startsWith("#Fields") || !s.startsWith("#") } + handler.clear() + rv + } + + handler = new StringHandler(BareFormatter, None) + logger.addHandler(handler) + logger.setUseParentHandlers(false) + logger.setLevel(Level.INFO) + + collection = new StatsCollection() + handler.clear() + statsLogger = new W3CStatsLogger(logger, 1.second, collection) + } + + test("log basic stats") { + val context = new Context + import context._ + + collection.incr("cats") + collection.incr("dogs", 3) + statsLogger.periodic() + assert(getLines() === "#Fields: cats dogs" :: "948200938 1 3" :: Nil) + } + + test("log timings") { + val context = new Context + import context._ + + Time.withCurrentTimeFrozen { time => + collection.time("zzz") { time advance 10.milliseconds } + collection.time("zzz") { time advance 20.milliseconds } + statsLogger.periodic() + assert(getLines() === List( + "#Fields: zzz_msec_average zzz_msec_count zzz_msec_maximum zzz_msec_minimum zzz_msec_sum", + "1176525931 15 2 19 10 30" + )) + } + } + + test("log multiple lines") { + val context = new Context + import context._ + + Time.withCurrentTimeFrozen { time => + collection.incr("cats") + collection.incr("dogs", 3) + collection.time("zzz") { time 
advance 10.milliseconds } + statsLogger.periodic() + collection.incr("cats") + collection.time("zzz") { time advance 20.milliseconds } + statsLogger.periodic() + assert(getLines() === List( + "#Fields: cats dogs zzz_msec_average zzz_msec_count zzz_msec_maximum zzz_msec_minimum zzz_msec_sum", + "2826312472 1 3 10 1 10 10 10", + "2826312472 1 0 20 1 19 19 20" + )) + } + } + + test("not repeat the header too often") { + val context = new Context + import context._ + + Time.withCurrentTimeFrozen { time => + collection.incr("cats") + statsLogger.periodic() + assert(getLines() === "#Fields: cats" :: "2001103910 1" :: Nil) + collection.incr("cats") + statsLogger.periodic() + assert(getLines() === "2001103910 1" :: Nil) + time advance 10.minutes + collection.incr("cats") + statsLogger.periodic() + assert(getLines() === "#Fields: cats" :: "2001103910 1" :: Nil) + } + } + + test("repeat the header when the fields change") { + val context = new Context + import context._ + + collection.incr("cats") + statsLogger.periodic() + assert(getLines() === "#Fields: cats" :: "2001103910 1" :: Nil) + collection.incr("cats") + statsLogger.periodic() + assert(getLines() === "2001103910 1" :: Nil) + collection.incr("cats") + collection.incr("dogs") + statsLogger.periodic() + assert(getLines() === "#Fields: cats dogs" :: "948200938 1 1" :: Nil) + } + +} diff --git a/src/test/scala/com/twitter/ostrich/stats/W3CStatsSpec.scala b/src/test/scala/com/twitter/ostrich/stats/W3CStatsSpec.scala deleted file mode 100644 index b0b10fa9..00000000 --- a/src/test/scala/com/twitter/ostrich/stats/W3CStatsSpec.scala +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright 2009 Twitter, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. You may obtain - * a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.twitter.ostrich -package stats - -import java.text.SimpleDateFormat -import java.util.Date -import scala.collection.immutable -import com.twitter.conversions.string._ -import com.twitter.logging.{BareFormatter, Level, Logger, StringHandler} -import org.specs.SpecificationWithJUnit - -class W3CStatsSpec extends SpecificationWithJUnit { - "w3c Stats" should { - val logger = Logger.get("w3c") - var handler: StringHandler = null - - val fields = Array( - "backend-response-time_msec_average", - "backend-response-method", - "request-uri", - "backend-response-time_nsec_average", - "unsupplied-field", - "finish_timestamp", - "widgets", - "wodgets" - ) - val w3c = new W3CStats(logger, fields, false) - - doBefore { - handler = new StringHandler(BareFormatter, None) - logger.addHandler(handler) - logger.setUseParentHandlers(false) - logger.setLevel(Level.INFO) - - Logger.get("").setLevel(Level.OFF) - Stats.clearAll() - handler.clear() - } - - def getLine() = { - val rv = handler.get.split("\n").filter { line => !(line startsWith "#") }.head - handler.clear() - rv - } - - "can be called manually" in { - val counters = Map("widgets" -> 3L) - val gauges = Map("wodgets" -> 3.5) - val metrics = Map("backend-response-time_msec" -> new Distribution(Histogram(10))) - val labels = Map("request-uri" -> "/home") - w3c.write(StatsSummary(counters, metrics, gauges, labels)) - getLine() mustEqual "10 - /home - - - 3 3.5" - } - - "can be called transactionally" in { - w3c { stats => - val response: Int = stats.time[Int]("backend-response-time") { - stats.setLabel("backend-response-method", "GET") - stats.setLabel("request-uri", "/home") - 1 + 1 - } - response mustEqual 2 - - val response2: Int = stats.timeNanos[Int]("backend-response-time") { - 1 + 2 - } - response2 mustEqual 3 - - stats.setGauge("wodgets", 3.5) - } - - val entries: Array[String] = getLine().split(" ") - entries(0).toInt must be_>=(0) - entries(1) mustEqual "GET" - entries(2) mustEqual "/home" - entries(3).toInt must be_>=(10) //must take at least 10 ns! - entries(4) mustEqual "-" - entries(7) mustEqual "3.5" - } - - "empty stats returns the empty string" in { - w3c { stats => () } - // strip out all unfound entries, and remove all whitespace. after that, it should be empty. - getLine().replaceAll("-", "").trim() mustEqual "" - } - - "logging a field not tracked in the fields member shouldn't show up in the logfile" in { - w3c { stats => - stats.setLabel("jibberish_nonsense", "foo") - } - getLine() must notInclude("foo") - } - - "sum counts within a transaction" in { - w3c { stats => - stats.incr("widgets", 8) - stats.incr("widgets", 8) - } - getLine() mustEqual "- - - - - - 16 -" - } - - "logs metrics only once" in { - w3c { stats => - stats.addMetric("backend-response-time_msec", 9) - stats.addMetric("backend-response-time_msec", 13) - } - getLine() mustEqual "11 - - - - - - -" - w3c { stats => - stats.addMetric("backend-response-time_msec", 9) - } - getLine() mustEqual "9 - - - - - - -" - } - } -} diff --git a/src/test/scala/com/twitter/ostrich/stats/W3CStatsTest.scala b/src/test/scala/com/twitter/ostrich/stats/W3CStatsTest.scala new file mode 100644 index 00000000..c2348418 --- /dev/null +++ b/src/test/scala/com/twitter/ostrich/stats/W3CStatsTest.scala @@ -0,0 +1,149 @@ +/* + * Copyright 2009 Twitter, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.twitter.ostrich.stats + +import com.twitter.conversions.string._ +import com.twitter.logging.{BareFormatter, Level, Logger, StringHandler} +import java.text.SimpleDateFormat +import java.util.Date +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.FunSuite + +@RunWith(classOf[JUnitRunner]) +class W3CStatsTest extends FunSuite { + + class Context { + val logger = Logger.get("w3c") + var handler: StringHandler = null + + val fields = Array( + "backend-response-time_msec_average", + "backend-response-method", + "request-uri", + "backend-response-time_nsec_average", + "unsupplied-field", + "finish_timestamp", + "widgets", + "wodgets" + ) + + val w3c = new W3CStats(logger, fields, false) + + handler = new StringHandler(BareFormatter, None) + logger.addHandler(handler) + logger.setUseParentHandlers(false) + logger.setLevel(Level.INFO) + + Logger.get("").setLevel(Level.OFF) + Stats.clearAll() + handler.clear() + + def getLine() = { + val rv = handler.get.split("\n").filter { line => !(line startsWith "#") }.head + handler.clear() + rv + } + } + + test("can be called manually") { + val context = new Context + import context._ + + val counters = Map("widgets" -> 3L) + val gauges = Map("wodgets" -> 3.5) + val metrics = Map("backend-response-time_msec" -> new Distribution(Histogram(10))) + val labels = Map("request-uri" -> "/home") + w3c.write(StatsSummary(counters, metrics, gauges, labels)) + assert(getLine() === "10 - /home - - - 3 3.5") + } + + test("can be called transactionally") { + val context = new Context + import context._ + + w3c { stats => + val response: Int = stats.time[Int]("backend-response-time") { + stats.setLabel("backend-response-method", "GET") + stats.setLabel("request-uri", "/home") + 1 + 1 + } + assert(response === 2) + + val response2: Int = stats.timeNanos[Int]("backend-response-time") { + 1 + 2 + } + assert(response2 === 3) + + stats.setGauge("wodgets", 3.5) + } + + val entries: Array[String] = getLine().split(" ") + assert(entries(0).toInt >= 0) + assert(entries(1) === "GET") + assert(entries(2) === "/home") + assert(entries(3).toInt >= 10) //must take at least 10 ns! + assert(entries(4) === "-") + assert(entries(7) === "3.5") + } + + test("empty stats returns the empty string") { + val context = new Context + import context._ + + w3c { stats => () } + // strip out all unfound entries, and remove all whitespace. after that, it should be empty. 
+ assert(getLine().replaceAll("-", "").trim() === "") + } + + test("logging a field not tracked in the fields member shouldn't show up in the logfile") { + val context = new Context + import context._ + + w3c { stats => + stats.setLabel("jibberish_nonsense", "foo") + } + assert(!getLine().contains("foo")) + } + + test("sum counts within a transaction") { + val context = new Context + import context._ + + w3c { stats => + stats.incr("widgets", 8) + stats.incr("widgets", 8) + } + assert(getLine() === "- - - - - - 16 -") + } + + test("logs metrics only once") { + val context = new Context + import context._ + + w3c { stats => + stats.addMetric("backend-response-time_msec", 9) + stats.addMetric("backend-response-time_msec", 13) + } + assert(getLine() === "11 - - - - - - -") + w3c { stats => + stats.addMetric("backend-response-time_msec", 9) + } + assert(getLine() === "9 - - - - - - -") + } + +}