Skip to content

Commit

Permalink
delete some unnecessary code, more to go
Browse files Browse the repository at this point in the history
  • Loading branch information
squito committed Mar 11, 2015
1 parent 56edce0 commit 5598f19
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 95 deletions.
47 changes: 0 additions & 47 deletions core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
Original file line number Diff line number Diff line change
Expand Up @@ -24,32 +24,6 @@ import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.ExecutorRunner

private[spark] object JsonProtocol {
/**
 * Renders a [[WorkerInfo]] as a JSON object via the json4s DSL.
 * Field names are intentionally all-lowercase to match the existing wire format.
 */
def writeWorkerInfo(obj: WorkerInfo) = {
  // Grouped for readability; `~` on JObjects concatenates their fields,
  // so the emitted key order is identical to a single flat chain.
  val identity =
    ("id" -> obj.id) ~
    ("host" -> obj.host) ~
    ("port" -> obj.port) ~
    ("webuiaddress" -> obj.webUiAddress)
  val resources =
    ("cores" -> obj.cores) ~
    ("coresused" -> obj.coresUsed) ~
    ("coresfree" -> obj.coresFree) ~
    ("memory" -> obj.memory) ~
    ("memoryused" -> obj.memoryUsed) ~
    ("memoryfree" -> obj.memoryFree)
  val liveness =
    ("state" -> obj.state.toString) ~
    ("lastheartbeat" -> obj.lastHeartbeat)
  identity ~ resources ~ liveness
}

/**
 * Renders an [[ApplicationInfo]] as a JSON object via the json4s DSL.
 * Mixes fields from the info itself and from its application description.
 */
def writeApplicationInfo(obj: ApplicationInfo) = {
  val desc = obj.desc
  ("starttime" -> obj.startTime) ~
    ("id" -> obj.id) ~
    ("name" -> desc.name) ~
    ("cores" -> desc.maxCores) ~
    ("user" -> desc.user) ~
    ("memoryperslave" -> desc.memoryPerSlave) ~
    ("submitdate" -> obj.submitDate.toString) ~
    ("state" -> obj.state.toString) ~
    ("duration" -> obj.duration)
}

def writeApplicationDescription(obj: ApplicationDescription) = {
("name" -> obj.name) ~
Expand All @@ -66,27 +40,6 @@ private[spark] object JsonProtocol {
("appdesc" -> writeApplicationDescription(obj.appDesc))
}

/**
 * Renders a [[DriverInfo]] as a JSON object via the json4s DSL.
 * Resource fields come from the driver's description.
 */
def writeDriverInfo(obj: DriverInfo) = {
  val desc = obj.desc
  ("id" -> obj.id) ~
    ("starttime" -> obj.startTime.toString) ~
    ("state" -> obj.state.toString) ~
    ("cores" -> desc.cores) ~
    ("memory" -> desc.mem)
}

/**
 * Renders a full [[MasterStateResponse]] snapshot as JSON: per-worker detail,
 * cluster-wide core/memory aggregates, and the app/driver lists.
 */
def writeMasterState(obj: MasterStateResponse) = {
  // Materialize the worker array once; it is both rendered individually
  // and folded into the cluster-wide totals below.
  val workerList = obj.workers.toList
  ("url" -> obj.uri) ~
    ("workers" -> workerList.map(writeWorkerInfo)) ~
    ("cores" -> workerList.map(_.cores).sum) ~
    ("coresused" -> workerList.map(_.coresUsed).sum) ~
    ("memory" -> workerList.map(_.memory).sum) ~
    ("memoryused" -> workerList.map(_.memoryUsed).sum) ~
    ("activeapps" -> obj.activeApps.toList.map(writeApplicationInfo)) ~
    ("completedapps" -> obj.completedApps.toList.map(writeApplicationInfo)) ~
    ("activedrivers" -> obj.activeDrivers.toList.map(writeDriverInfo)) ~
    ("status" -> obj.status.toString)
}

def writeWorkerState(obj: WorkerStateResponse) = {
("id" -> obj.workerId) ~
("masterurl" -> obj.masterUrl) ~
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,18 +32,6 @@ import org.apache.spark.SparkConf

class JsonProtocolSuite extends FunSuite {

test("writeApplicationInfo") {
  // The rendered JSON must be well-formed and match the expected fixture string.
  val json = JsonProtocol.writeApplicationInfo(createAppInfo())
  assertValidJson(json)
  assertValidDataInJson(json, JsonMethods.parse(JsonConstants.appInfoJsonStr))
}

test("writeWorkerInfo") {
  // The rendered JSON must be well-formed and match the expected fixture string.
  val json = JsonProtocol.writeWorkerInfo(createWorkerInfo())
  assertValidJson(json)
  assertValidDataInJson(json, JsonMethods.parse(JsonConstants.workerInfoJsonStr))
}

test("writeApplicationDescription") {
val output = JsonProtocol.writeApplicationDescription(createAppDesc())
assertValidJson(output)
Expand All @@ -56,26 +44,6 @@ class JsonProtocolSuite extends FunSuite {
assertValidDataInJson(output, JsonMethods.parse(JsonConstants.executorRunnerJsonStr))
}

test("writeDriverInfo") {
  // The rendered JSON must be well-formed and match the expected fixture string.
  val json = JsonProtocol.writeDriverInfo(createDriverInfo())
  assertValidJson(json)
  assertValidDataInJson(json, JsonMethods.parse(JsonConstants.driverInfoJsonStr))
}

test("writeMasterState") {
  // Representative master snapshot: two workers, one running app, no finished
  // apps, and one driver in each of the active/completed lists.
  val workers = Array(createWorkerInfo(), createWorkerInfo())
  val runningApps = Array(createAppInfo())
  val finishedApps = Array.empty[ApplicationInfo]
  val runningDrivers = Array(createDriverInfo())
  val finishedDrivers = Array(createDriverInfo())
  val state = new MasterStateResponse(
    "host", 8080, None, workers, runningApps, finishedApps,
    runningDrivers, finishedDrivers, RecoveryState.ALIVE)
  val json = JsonProtocol.writeMasterState(state)
  assertValidJson(json)
  assertValidDataInJson(json, JsonMethods.parse(JsonConstants.masterStateJsonStr))
}

test("writeWorkerState") {
val executors = List[ExecutorRunner]()
val finishedExecutors = List[ExecutorRunner](createExecutorRunner(), createExecutorRunner())
Expand All @@ -93,13 +61,6 @@ class JsonProtocolSuite extends FunSuite {
new ApplicationDescription("name", Some(4), 1234, cmd, "appUiUrl")
}

/**
 * Builds an [[ApplicationInfo]] fixture whose timestamps come from
 * [[JsonConstants]] so tests can compare against the fixed JSON strings.
 */
def createAppInfo(): ApplicationInfo = {
  val info = new ApplicationInfo(
    JsonConstants.appInfoStartTime, "id", createAppDesc(),
    JsonConstants.submitDate, null, Int.MaxValue)
  // Pin the mutable end time so the rendered JSON is deterministic.
  info.endTime = JsonConstants.currTimeInMillis
  info
}

def createDriverCommand() = new Command(
"org.apache.spark.FakeClass", Seq("some arg --and-some options -g foo"),
Map(("K1", "V1"), ("K2", "V2")), Seq("cp1", "cp2"), Seq("lp1", "lp2"), Seq("-Dfoo")
Expand All @@ -108,15 +69,6 @@ class JsonProtocolSuite extends FunSuite {
// DriverDescription fixture pointing at a dummy jar with fixed resource values.
def createDriverDesc() =
  new DriverDescription("hdfs://some-dir/some.jar", 100, 3, false, createDriverCommand())

// DriverInfo fixture with a fixed id; note the start date is `new Date()`,
// so fields derived from it are not deterministic across runs.
def createDriverInfo(): DriverInfo =
  new DriverInfo(3, "driver-3", createDriverDesc(), new Date())

/**
 * Builds a [[WorkerInfo]] fixture with fixed identity/resource values and a
 * heartbeat pinned to [[JsonConstants.currTimeInMillis]] for deterministic JSON.
 */
def createWorkerInfo(): WorkerInfo = {
  val worker = new WorkerInfo("id", "host", 8080, 4, 1234, null, 80, "publicAddress")
  worker.lastHeartbeat = JsonConstants.currTimeInMillis
  worker
}

def createExecutorRunner(): ExecutorRunner = {
new ExecutorRunner("appId", 123, createAppDesc(), 4, 1234, null, "workerId", "host", 123,
"publicAddress", new File("sparkHome"), new File("workDir"), "akka://worker",
Expand Down

0 comments on commit 5598f19

Please sign in to comment.