Skip to content

Commit

Permalink
Add slack telemetry messages, remove NewRelic (#4231)
Browse files Browse the repository at this point in the history
* Add slack telemetry messages for sql errors

* pretty backend

* remove newrelic code
  • Loading branch information
fm3 authored Aug 14, 2019
1 parent ef80788 commit 92bceb6
Show file tree
Hide file tree
Showing 14 changed files with 63 additions and 43 deletions.
5 changes: 0 additions & 5 deletions app/ErrorHandler.scala
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import com.mohiva.play.silhouette.api.actions.SecuredErrorHandler
import com.newrelic.api.agent.NewRelic
import javax.inject._
import play.api.http.DefaultHttpErrorHandler
import play.api._
Expand All @@ -26,8 +25,4 @@ class ErrorHandler @Inject()(env: Environment,
// Silhouette hook invoked when a request fails an authorization check:
// responds 403 Forbidden with the localized "notAllowed" message.
override def onNotAuthorized(implicit request: RequestHeader): Future[Result] =
Future.successful(Forbidden(Messages("notAllowed")))

override def onServerError(request: RequestHeader, ex: Throwable): Future[Result] = {
NewRelic.noticeError(ex)
super.onServerError(request, ex)
}
}
8 changes: 5 additions & 3 deletions app/Startup.scala
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
import akka.actor.{ActorSystem, Props}
import com.newrelic.api.agent.NewRelic
import com.scalableminds.util.accesscontext.GlobalAccessContext
import com.scalableminds.util.mail.{Mailer, MailerConfig}
import com.typesafe.scalalogging.LazyLogging
import controllers.InitialDataService
import io.apigee.trireme.core.NodeEnvironment
import java.io.File

import javax.inject._
import models.annotation.AnnotationDAO
import net.liftweb.common.{Failure, Full}
import oxalis.cleanup.CleanUpService
import oxalis.security.{WkEnv, WkSilhouetteEnvironment}
import com.mohiva.play.silhouette.api.Silhouette
import oxalis.telemetry.SlackNotificationService.SlackNotificationService
import play.api.inject.ApplicationLifecycle
import utils.{SQLClient, WkConf}

Expand All @@ -27,7 +28,8 @@ class Startup @Inject()(actorSystem: ActorSystem,
annotationDAO: AnnotationDAO,
wkSilhouetteEnvironment: WkSilhouetteEnvironment,
lifecycle: ApplicationLifecycle,
sqlClient: SQLClient)
sqlClient: SQLClient,
slackNotificationService: SlackNotificationService)
extends LazyLogging {

logger.info("Executing Startup")
Expand Down Expand Up @@ -84,7 +86,7 @@ class Startup @Inject()(actorSystem: ActorSystem,
} else {
val errorMessage = new StringBuilder("Database schema does not fit to schema.sql!")
logger.error(errorMessage.toString())
NewRelic.noticeError(errorMessage.toString())
slackNotificationService.noticeError(errorMessage.toString())
}

Future.successful(())
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package oxalis.telemetry.SlackNotificationService

import com.scalableminds.webknossos.datastore.rpc.RPC
import com.typesafe.scalalogging.LazyLogging
import javax.inject.Inject
import play.api.libs.json.{JsObject, Json}
import utils.WkConf

/** Sends error telemetry to a Slack channel via an incoming-webhook URL.
  * Sending is disabled when the configured URL is the sentinel "empty"
  * (the default in application.conf).
  */
class SlackNotificationService @Inject()(rpc: RPC, conf: WkConf) extends LazyLogging {

  // Webhook endpoint read once from configuration.
  lazy val url: String = conf.SlackNotifications.url

  /** Reports a throwable together with additional context text. */
  def noticeError(ex: Throwable, message: String): Unit =
    noticeError(s"$ex: ${ex.getLocalizedMessage}\n$message")

  /** Posts the message as a colored Slack attachment. Fire-and-forget:
    * the HTTP call's outcome is not awaited or surfaced to the caller.
    */
  def noticeError(msg: String): Unit = {
    val notificationsEnabled = url != "empty"
    if (notificationsEnabled) {
      logger.info(s"Sending Slack notification: $msg")
      val attachment = Json.obj(
        "title" -> s"Notification from webKnossos at ${conf.Http.uri}",
        "text" -> msg,
        "color" -> "#ff8a00"
      )
      rpc(url).postJson(Json.obj("attachments" -> Json.arr(attachment)))
    }
  }
}
11 changes: 6 additions & 5 deletions app/utils/SQLHelpers.scala
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
package utils

import com.newrelic.api.agent.NewRelic
import com.scalableminds.util.accesscontext.DBAccessContext
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.typesafe.scalalogging.LazyLogging
import javax.inject.Inject
import models.user.User
import net.liftweb.common.Full
import oxalis.security.{SharingTokenContainer, UserSharingTokenContainer}
import oxalis.telemetry.SlackNotificationService.SlackNotificationService
import play.api.Configuration
import play.api.libs.json.{Json, JsonValidationError, Reads}
import reactivemongo.bson.BSONObjectID
Expand All @@ -22,8 +22,9 @@ import play.api.data.validation.ValidationError

import scala.concurrent.ExecutionContext

class SQLClient @Inject()(configuration: Configuration) {
// Holds the shared Slick database handle plus the Slack service used to report SQL errors.
class SQLClient @Inject()(configuration: Configuration, slackNotificationService: SlackNotificationService) {
// Lazy so the connection pool is only created on first database access.
lazy val db: PostgresProfile.backend.Database = Database.forConfig("slick.db", configuration.underlying)
// Exposes the injected service so DAOs constructed with only a SQLClient can report errors to Slack.
def getSlackNotificationService = slackNotificationService
}

case class ObjectId(id: String) {
Expand Down Expand Up @@ -71,7 +72,7 @@ class SimpleSQLDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext
run(query, retryCount - 1, retryIfErrorContains)
} else {
logError(e, query)
reportErrorToNewrelic(e, query)
reportErrorToSlack(e, query)
Fox.failure("SQL Failure: " + e.getMessage)
}
}
Expand All @@ -85,8 +86,8 @@ class SimpleSQLDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext
logger.debug("Caused by query:\n" + query.getDumpInfo.mainInfo)
}

private def reportErrorToNewrelic[R](ex: Throwable, query: DBIOAction[R, NoStream, Nothing]) =
NewRelic.noticeError(ex, Map("Causing query: " -> query.getDumpInfo.mainInfo).asJava)
// Forwards a failed SQL query to Slack, attaching the query's dump info for debugging.
private def reportErrorToSlack[R](ex: Throwable, query: DBIOAction[R, NoStream, Nothing]) =
sqlClient.getSlackNotificationService.noticeError(ex, s"Causing query: ${query.getDumpInfo.mainInfo}")

def writeArrayTuple(elements: List[String]): String = {
val commaSeparated = elements.mkString(",")
Expand Down
4 changes: 4 additions & 0 deletions app/utils/WkConf.scala
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,10 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader {
val environment = get[String]("airbrake.environment")
}

// Slack telemetry settings; the sentinel value "empty" disables notifications.
object SlackNotifications {
// Incoming-webhook URL, read from the slackNotifications.url config key.
val url = get[String]("slackNotifications.url")
}

object Google {
object Analytics {
val trackingID = get[String]("google.analytics.trackingID")
Expand Down
3 changes: 1 addition & 2 deletions app/views/main.scala.html
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
media="screen"
href="/assets/bundle/main.css?nocache=@(webknossos.BuildInfo.commitHash)"
/>
@Html(com.newrelic.api.agent.NewRelic.getBrowserTimingHeader)
@if(conf.Application.Authentication.enableDevAutoLogin){
<script src="/api/auth/autoLogin"></script>
}
Expand Down Expand Up @@ -65,6 +64,6 @@
ga("set", "anonymizeIp", true);
ga("send", "pageview");
</script>
} @Html(com.newrelic.api.agent.NewRelic.getBrowserTimingFooter)
}
</body>
</html>
4 changes: 4 additions & 0 deletions conf/application.conf
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,10 @@ airbrake { # To be defined in secrets.conf
projectID = "empty"
}

# Slack telemetry webhook URL; the sentinel "empty" disables sending notifications.
slackNotifications {
url = "empty"
}

mail.reply = "No reply <[email protected]>"

# other settings
Expand Down
5 changes: 0 additions & 5 deletions project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import sbt._
object Dependencies {
val akkaVersion = "2.5.22"
val log4jVersion = "2.0-beta9"
val newrelicVersion = "3.44.1"
val webknossosWrapVersion = "1.1.7"

val akkaAgent = "com.typesafe.akka" %% "akka-agent" % akkaVersion
Expand All @@ -23,8 +22,6 @@ object Dependencies {
val liftUtil = "net.liftweb" %% "lift-util" % "3.0.2"
val log4jApi = "org.apache.logging.log4j" % "log4j-core" % log4jVersion
val log4jCore = "org.apache.logging.log4j" % "log4j-api" % log4jVersion
val newrelic = "com.newrelic.agent.java" % "newrelic-agent" % newrelicVersion
val newrelicApi = "com.newrelic.agent.java" % "newrelic-api" % newrelicVersion
val playFramework = "com.typesafe.play" %% "play" % "2.7.1"
val playJson = "com.typesafe.play" %% "play-json" % "2.7.2"
val playIteratees = "com.typesafe.play" %% "play-iteratees" % "2.6.1"
Expand Down Expand Up @@ -73,8 +70,6 @@ object Dependencies {
akkaLogging,
ehcache,
gson,
newrelic,
newrelicApi,
webknossosWrap,
playIterateesStreams,
filters,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,13 @@ package com.scalableminds.webknossos.datastore.dataformats

import java.util.concurrent.TimeoutException

import com.newrelic.api.agent.NewRelic
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.DataCubeCache
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.typesafe.scalalogging.LazyLogging
import net.liftweb.common.Failure

import collection.JavaConverters._
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{Await, ExecutionContext, Future}

Expand All @@ -27,17 +25,13 @@ trait BucketProvider extends FoxImplicits with LazyLogging {
val className = this.getClass.getName.split("\\.").last
val result = Await.result(loadFromUnderlying(readInstruction).futureBox, timeout)
val duration = System.currentTimeMillis - t
NewRelic.recordResponseTimeMetric(s"Custom/BucketProvider/$className/file-response-time", duration)
NewRelic.incrementCounter(s"Custom/BucketProvider/$className/files-loaded")
if (duration > 500) {
NewRelic.noticeError(
s"loading file in $className took too long",
Map(
"duration" -> duration.toString,
"dataSource" -> readInstruction.dataSource.id.name,
"dataLayer" -> readInstruction.dataLayer.name,
"cube" -> readInstruction.cube.toString
).asJava
logger.warn(
s"loading file in $className took too long.\n"
+ s" duration: $duration\n"
+ s" dataSource: ${readInstruction.dataSource.id.name}\n"
+ s" dataLayer: ${readInstruction.dataLayer.name}\n"
+ s" cube: ${readInstruction.cube}"
)
}
result
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import java.nio.MappedByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.Path

import com.newrelic.api.agent.NewRelic
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, Cube}
import com.scalableminds.webknossos.datastore.models._
import com.scalableminds.webknossos.datastore.models.datasource.DataLayer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,14 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient) extends FoxIm
parseProtoResponse(performRequest)(companion)
}

/** Sends the given body as a JSON POST request.
  * Fire-and-forget: the Future produced by performRequest is discarded,
  * so callers are not notified of failures (used e.g. for telemetry).
  */
def postJson[J: Writes](body: J = Json.obj()): Unit = {
request = request
.addHttpHeaders(HeaderNames.CONTENT_TYPE -> "application/json")
.withBody(Json.toJson(body))
.withMethod("POST")
performRequest
}

def postProtoWithJsonResponse[T <: GeneratedMessage with Message[T], J: Reads](body: T): Fox[J] = {
request = request
.addHttpHeaders(HeaderNames.CONTENT_TYPE -> "application/x-protobuf")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.models.datasource.DataLayerMapping
import com.typesafe.scalalogging.LazyLogging
import com.google.gson.JsonParseException
import com.google.gson.stream.JsonReader
import com.newrelic.api.agent.NewRelic

import scala.collection.mutable
import scala.reflect.ClassTag
Expand Down Expand Up @@ -56,7 +55,7 @@ object MappingParser extends LazyLogging {
jsonReader.endObject()

val end = System.currentTimeMillis()
NewRelic.recordMetric("Custom/FileDataStore/MappingParser/parsingTime", end - start)
logger.info(s"Mapping parsing took ${end - start} ms")

for {
name <- nameOpt
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package com.scalableminds.webknossos.datastore.storage

import com.newrelic.api.agent.NewRelic
import com.scalableminds.webknossos.datastore.dataformats.Cube
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.util.cache.LRUConcurrentCache
Expand Down Expand Up @@ -57,8 +56,6 @@ class DataCubeCache(val maxEntries: Int) extends LRUConcurrentCache[CachedCube,
}.toFox

put(cachedCubeInfo, cubeFox)
NewRelic.incrementCounter("Custom/FileDataStore/Cache/miss")
NewRelic.recordMetric("Custom/FileDataStore/Cache/size", size())

cubeFox.flatMap { cube =>
val result = f(cube)
Expand All @@ -71,7 +68,6 @@ class DataCubeCache(val maxEntries: Int) extends LRUConcurrentCache[CachedCube,
case Some(cubeFox) =>
cubeFox.flatMap { cube =>
if (cube.tryAccess()) {
NewRelic.incrementCounter("Custom/FileDataStore/Cache/hit")
val result = f(cube)
cube.finishAccess()
result.toFox
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package com.scalableminds.webknossos.datastore.storage

import com.newrelic.api.agent.NewRelic
import com.scalableminds.util.cache.LRUConcurrentCache
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.dataformats.Cube
Expand Down Expand Up @@ -48,8 +47,6 @@ class ParsedMappingCache(val maxEntries: Int)
}.toFox

put(cachedMappingInfo, mappingFox)
NewRelic.incrementCounter("Custom/FileDataStore/MappingCache/miss")
NewRelic.recordMetric("Custom/FileDataStore/MappingCache/size", size())

mappingFox.map { mapping =>
f(mapping)
Expand All @@ -59,7 +56,6 @@ class ParsedMappingCache(val maxEntries: Int)
get(cachedMappingInfo) match {
case Some(mappingFox) =>
mappingFox.map { mapping =>
NewRelic.incrementCounter("Custom/FileDataStore/MappingCache/hit")
f(mapping)
}
case _ => handleUncachedMapping()
Expand Down

0 comments on commit 92bceb6

Please sign in to comment.