This repository has been archived by the owner on Feb 4, 2021. It is now read-only.

Revert prior commits updating project settings and dependencies #12

Merged: 1 commit, Nov 27, 2017
2 changes: 0 additions & 2 deletions .gitignore
@@ -2,5 +2,3 @@ target/
.idea/
.idea_modules/
.DS_Store
.sbt/
.ivy2/
3 changes: 1 addition & 2 deletions .travis.yml
@@ -3,10 +3,9 @@ sudo: false
cache:
directories:
- $HOME/.ivy2
- $HOME/.sbt
matrix:
include:
- jdk: openjdk8
- jdk: openjdk7
scala: 2.11.8
python: 3.5
env: TEST_SPARK_VERSION="2.1.0" AWS_ACCESS_KEY_ID=foo AWS_SECRET_ACCESS_KEY=foo
25 changes: 5 additions & 20 deletions README.md
@@ -3,39 +3,24 @@
[![CircleCi](https://circleci.com/gh/mozilla/moztelemetry.svg?style=shield&circle-token=3fff2168f7d8da61b47bd1481c4fcb984ec88ef5)](https://circleci.com/gh/mozilla/moztelemetry)

# moztelemetry

Mozilla's Telemetry API for Scala

## Using moztelemetry

In SBT:
```
resolvers += "S3 local maven snapshots" at "s3://net-mozaws-data-us-west-2-ops-mavenrepo/snapshots"
libraryDependencies += "com.mozilla.telemetry" %% "moztelemetry" % "1.0-SNAPSHOT"
```

## Testing

To run the tests, build the docker container and run the tests.

Build the container. You only have to do this once or when you update the Dockerfile:

```
docker build -t moztelemetry .
```

Run the tests in the docker container:
To run the tests you have to start a mock S3 service first with moto:

```
./bin/test
```

Other test tasks can be run by passing the task through the test script, e.g.:

```
./bin/test "testOnly com.mozilla.telemetry.stats.StatsTest"
pip install moto
moto_server s3 -p 8001 &
AWS_ACCESS_KEY_ID=foo AWS_SECRET_ACCESS_KEY=foo sbt test
```
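
As an aside rather than part of the diff above: moto does not validate credentials, which is why the command and the CI config use the placeholder `foo` keys. Below is a hedged Scala sketch of pointing an S3 client from the AWS SDK (already a dependency in build.sbt) at the local moto endpoint; the port 8001 matches the `moto_server` command above, but the client setup itself is an assumption, not code taken from this repository's tests.

```scala
// Hypothetical sketch: point an S3 client at a local moto endpoint on port 8001,
// as started by the README command above. Not taken from this repository.
import com.amazonaws.auth.{AWSStaticCredentialsProvider, BasicAWSCredentials}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.s3.AmazonS3ClientBuilder

object MotoS3Example {
  def main(args: Array[String]): Unit = {
    val s3 = AmazonS3ClientBuilder.standard()
      // moto accepts any key pair, hence the placeholder credentials
      .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("foo", "foo")))
      .withEndpointConfiguration(new EndpointConfiguration("http://localhost:8001", "us-west-2"))
      .withPathStyleAccessEnabled(true)
      .build()

    s3.createBucket("test-bucket") // hypothetical bucket name
    println(s3.listBuckets())
  }
}
```

Path-style access is enabled so that bucket names are not resolved as DNS subdomains of localhost.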

## Publishing snapshots

Snapshots will now be published to our local maven repo in s3 on every commit merged into master via a circleci build

16 changes: 12 additions & 4 deletions build.sbt
@@ -6,26 +6,32 @@ organization := "com.mozilla.telemetry"

scalaVersion := "2.11.8"

val sparkVersion = "2.2.0"
sparkVersion := "2.1.0"

sparkComponents ++= Seq("core")

resolvers += Resolver.bintrayRepo("findify", "maven")

libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-core" % sparkVersion % "provided",
"org.scalatest" %% "scalatest" % "2.2.6" % "test",
"commons-io" % "commons-io" % "1.3.2" % "test",
"org.apache.commons" % "commons-io" % "1.3.2" % "test",
"com.github.seratch" %% "awscala" % "0.5.+",
"com.amazonaws" % "aws-java-sdk" % "1.11.83",
"com.google.protobuf" % "protobuf-java" % "2.5.0"
)

/*
The HBase client requires protobuf-java 2.5.0 but scalapb uses protobuf-java 3.x
so we have to force the dependency here. This should be fine as we are using only
version 2 of the protobuf spec.
*/
dependencyOverrides += "com.google.protobuf" % "protobuf-java" % "2.5.0"

// Shade PB files
assemblyShadeRules in assembly := Seq(
ShadeRule.rename("com.google.protobuf.**" -> "shadeproto.@1").inAll,
ShadeRule.rename("com.trueaccord.scalapb.**" -> "shadescalapb.@1").inAll
)

// Compile proto files
PB.targets in Compile := Seq(
scalapb.gen() -> (sourceManaged in Compile).value
@@ -34,6 +40,8 @@ PB.targets in Compile := Seq(
// Exclude generated classes from the coverage
coverageExcludedPackages := "com\\.mozilla\\.telemetry\\.heka\\.(Field|Message|Header)"

credentials += Credentials(Path.userHome / ".ivy2" / ".sbtcredentials")

publishMavenStyle := true

publishTo := {
26 changes: 17 additions & 9 deletions circle.yml
@@ -1,23 +1,31 @@
machine:
pre:
# Install sbt 1.0.3
# Install sbt 0.13.16
- sudo apt-get install openjdk-8-jdk
- wget -q https://dl.bintray.com/sbt/debian/sbt-1.0.3.deb
- sudo dpkg -i sbt-1.0.3.deb
- wget -q https://dl.bintray.com/sbt/debian/sbt-0.13.16.deb
- sudo dpkg -i sbt-0.13.16.deb
cache_directories:
- "~/.ivy2"
- "~/.sbt"
services:
- docker

dependencies:
override:
- docker info
- docker build --rm=false -t circleci/moztelemetry:$CIRCLE_SHA1 .
pre:
- sudo apt-get update
- sudo apt-get install libffi-dev libssl-dev python-dev
- sudo apt-get install python
- python --version
- curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
- sudo python get-pip.py
- sudo pip install --upgrade pip
- sudo pip install --upgrade setuptools
- sudo pip install pyopenssl ndg-httpsclient pyasn1
- sudo pip install moto --ignore-installed six
- sudo pip install flask
- moto_server s3 -p 8001 &

test:
override:
- docker run -t -i -p 8001:8001 circleci/moztelemetry:$CIRCLE_SHA1 ./bin/run_tests
- 'true'

deployment:
latest:
1 change: 0 additions & 1 deletion project/build.properties

This file was deleted.

10 changes: 6 additions & 4 deletions project/plugins.sbt
@@ -1,11 +1,13 @@
resolvers += "bintray-spark-packages" at "https://dl.bintray.com/spark-packages/maven/"

addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1")
addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.5")

addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0")

addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.12")
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0")

addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.3")

addSbtPlugin("com.frugalmechanic" % "fm-sbt-s3-resolver" % "0.12.0")

libraryDependencies += "com.trueaccord.scalapb" %% "compilerplugin" % "0.6.6"
libraryDependencies += "com.trueaccord.scalapb" %% "compilerplugin" % "0.5.47"
20 changes: 10 additions & 10 deletions src/main/scala/com/mozilla/telemetry/heka/package.scala
@@ -19,7 +19,7 @@ package object heka {
private def field(f: Field): Any = {
// I am assuming there is only one value
f.getValueType match {
case Field.ValueTypeEnum.BYTES => {
case Field.ValueType.BYTES => {
val bytes = f.valueBytes(0)
// Our JSON bytes fields sometimes contain non-UTF8 strings that can
// still be parsed as JSON. For now, we attempt to coerce all bytes
Expand All @@ -28,10 +28,10 @@ package object heka {
// for details.
bytes.toStringUtf8
}
case Field.ValueTypeEnum.STRING => f.valueString(0)
case Field.ValueTypeEnum.BOOL => f.valueBool(0)
case Field.ValueTypeEnum.DOUBLE => f.valueDouble(0)
case Field.ValueTypeEnum.INTEGER => f.valueInteger(0)
case Field.ValueType.STRING => f.valueString(0)
case Field.ValueType.BOOL => f.valueBool(0)
case Field.ValueType.DOUBLE => f.valueDouble(0)
case Field.ValueType.INTEGER => f.valueInteger(0)
case _ => assert(false)
}
}
@@ -42,19 +42,19 @@
def apply (uuid: String, fieldsMap: Map[String, Any], payload: Option[String], timestamp: Long=0): Message = {
val fields = fieldsMap.toList.map{
case (k: String, v: ByteString) => {
Field(k, Some(Field.ValueTypeEnum.BYTES), valueBytes=Seq(v))
Field(k, Some(Field.ValueType.BYTES), valueBytes=Seq(v))
}
case (k: String, v: String) => {
Field(k, Some(Field.ValueTypeEnum.STRING), valueString=Seq(v))
Field(k, Some(Field.ValueType.STRING), valueString=Seq(v))
}
case (k: String, v: Boolean) => {
Field(k, Some(Field.ValueTypeEnum.BOOL), valueBool=Seq(v))
Field(k, Some(Field.ValueType.BOOL), valueBool=Seq(v))
}
case (k: String, v: Double) => {
Field(k, Some(Field.ValueTypeEnum.DOUBLE), valueDouble=Seq(v))
Field(k, Some(Field.ValueType.DOUBLE), valueDouble=Seq(v))
}
case (k: String, v: Long) => {
Field(k, Some(Field.ValueTypeEnum.INTEGER), valueInteger=Seq(v))
Field(k, Some(Field.ValueType.INTEGER), valueInteger=Seq(v))
}
}.toSeq
Message(ByteString.copyFromUtf8(uuid), timestamp, payload=payload, fields=fields)
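
To close the file out, here is an illustrative sketch of calling the `apply` helper shown in this hunk. The name of its enclosing object is not visible in the diff, so `RichMessage` is assumed, and every field value is invented; each map value type selects one of the `Field.ValueType` cases handled by `field` above.

```scala
// Hypothetical usage of the apply defined above. "RichMessage" is an assumed
// name for its enclosing object; all field values are invented for illustration.
import com.google.protobuf.ByteString
import com.mozilla.telemetry.heka.RichMessage

object RichMessageExample {
  def main(args: Array[String]): Unit = {
    val fields = Map(
      "docType"    -> "main",                                  // STRING
      "sampleId"   -> 42L,                                     // INTEGER
      "isRelease"  -> true,                                    // BOOL
      "sampleRate" -> 0.01,                                    // DOUBLE
      "submission" -> ByteString.copyFromUtf8("""{"a": 1}""")  // BYTES
    )
    // Matches: apply(uuid: String, fieldsMap: Map[String, Any],
    //                payload: Option[String], timestamp: Long = 0)
    val msg = RichMessage("00000000-0000-0000-0000-000000000000", fields, payload = None)
    println(msg)
  }
}
```

Any value type other than the five shown hits no case in the pattern match above and fails with a MatchError at runtime.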