cleanup, combine stage-related paths into one resource
squito committed Mar 31, 2015
1 parent aaba896 commit b2efcaf
Showing 4 changed files with 82 additions and 160 deletions.
@@ -67,17 +67,6 @@ private[v1] class JsonRootResource extends UIRootFromServletContext {
new OneStageResource(uiRoot)
}

@Path("applications/{appId}/stages/{stageId: \\d+}/{attemptId: \\d+}")
def getStageAttempt(): OneStageAttemptResource = {
new OneStageAttemptResource(uiRoot)
}


@Path("applications/{appId}/stages/{stageId: \\d+}/{attemptId: \\d+}/taskSummary")
def getStageAttemptTaskSummary(): StageTaskSummary = {
new StageTaskSummary(uiRoot)
}

@Path("applications/{appId}/storage/rdd")
def getRdds(): AllRDDResource = {
new AllRDDResource(uiRoot)
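With the separate stage-attempt and taskSummary paths removed above, all three views are now served by the single OneStageResource shown further down. For illustration only, the consolidated endpoints might be exercised as follows; the api/v1 mount point, host, port, application id, and stage/attempt ids are assumptions and not part of this diff:

import scala.io.Source

object StageEndpointsExample extends App {
  // Hypothetical base URL and application id; the route templates themselves
  // come from the @Path annotations on OneStageResource in this commit.
  val base = "http://localhost:4040/api/v1/applications/app-20150331-0000"

  val allAttemptsOfStage = s"$base/stages/3"      // every attempt of stage 3
  val oneAttempt = s"$base/stages/3/0"            // attempt 0 only
  val taskSummary = s"$base/stages/3/0/taskSummary?quantiles=0.05,0.5,0.95"

  // Fetch and print the JSON returned by each consolidated endpoint.
  Seq(allAttemptsOfStage, oneAttempt, taskSummary).foreach { url =>
    println(Source.fromURL(url).mkString)
  }
}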

This file was deleted.

@@ -16,41 +16,109 @@
*/
package org.apache.spark.status.api.v1

import javax.ws.rs._
import javax.ws.rs.core.MediaType

import org.apache.spark.SparkException
import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.jobs.JobProgressListener

@Produces(Array(MediaType.APPLICATION_JSON))
private[v1] class OneStageResource(uiRoot: UIRoot) {

@GET
@Path("")
def stageData(
@PathParam("appId") appId: String,
@PathParam("stageId") stageId: Int
): Seq[StageData] = {
forStage(appId, stageId) { (listener, stageAttempts) =>
stageAttempts.map { case (status, stageInfo) =>
val stageUiData = listener.synchronized {
listener.stageIdToData.get((stageInfo.stageId, stageInfo.attemptId)).
getOrElse(throw new SparkException("failed to get full stage data for stage: " +
stageInfo.stageId + ":" + stageInfo.attemptId)
)
}
AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData,
includeDetails = true)
}
}
}

@GET
@Path("/{attemptId: \\d+}")
def oneAttemptData(
@PathParam("appId") appId: String,
@PathParam("stageId") stageId: Int,
@PathParam("attemptId") attemptId: Int
): StageData = {
forStageAttempt(appId, stageId, attemptId) { case (listener, status, stageInfo) =>
val stageUiData = listener.synchronized {
listener.stageIdToData.get((stageInfo.stageId, stageInfo.attemptId)).
getOrElse(throw new SparkException("failed to get full stage data for stage: " +
stageInfo.stageId + ":" + stageInfo.attemptId)
)
}
AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData,
includeDetails = true)
}
}

@GET
@Path("/{attemptId: \\d+}/taskSummary")
def stageData(
@PathParam("appId") appId: String,
@PathParam("stageId") stageId: Int,
@PathParam("attemptId") attemptId: Int,
@DefaultValue("0.05,0.25,0.5,0.75,0.95") @QueryParam("quantiles") quantileString: String
): TaskMetricDistributions = {
forStageAttempt(appId, stageId, attemptId) { case (listener, status, stageInfo) =>
val stageUiData = listener.synchronized {
listener.stageIdToData.get((stageInfo.stageId, stageInfo.attemptId)).
getOrElse(throw new SparkException("failed to get full stage data for stage: " +
stageInfo.stageId + ":" + stageInfo.attemptId)
)
}
// TODO: validate the quantiles parameter (a sketch follows this listing)
val quantiles = quantileString.split(",").map(_.toDouble)
AllStagesResource.taskMetricDistributions(stageUiData.taskData.values, quantiles)
}
}

def forStage[T](appId: String, stageId: Int)
(f: (JobProgressListener, Seq[(StageStatus, StageInfo)]) => T): T = {
uiRoot.withSparkUI(appId) { ui =>
val stageAndStatus = AllStagesResource.stagesAndStatus(ui)
val stageAttempts = stageAndStatus.flatMap { case (status, stages) =>
val matched = stages.filter { stage => stage.stageId == stageId }
matched.map { status -> _ }
}
if (stageAttempts.isEmpty) {
throw new NotFoundException("unknown stage: " + stageId)
} else {
f(ui.jobProgressListener, stageAttempts)
}
}
}

def forStageAttempt[T](appId: String, stageId: Int, attemptId: Int)
(f: (JobProgressListener, StageStatus, StageInfo) => T): T = {
forStage(appId, stageId) { case (listener, attempts) =>
val oneAttempt = attempts.find { case (status, stage) =>
stage.attemptId == attemptId
}
oneAttempt match {
case Some((status, stageInfo)) =>
f(listener, status, stageInfo)
case None =>
val stageAttempts = attempts.map { _._2.attemptId }
throw new NotFoundException(s"unknown attempt for stage $stageId. " +
s"Found attempts: ${stageAttempts.mkString("[", ",", "]")}")
}
}
}
}
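The TODO in the taskSummary handler above leaves quantile parsing unchecked, so a malformed quantiles parameter would surface as a generic server error. A minimal sketch of one way to validate it, using only the standard JAX-RS WebApplicationException and Response types; the helper name and the 400 wording are not part of this commit:

import javax.ws.rs.WebApplicationException
import javax.ws.rs.core.Response

object QuantileParsing {
  // Parse a string like "0.05,0.25,0.5" into doubles, rejecting bad or
  // out-of-range values with a 400 rather than an unhandled NumberFormatException.
  def parseQuantiles(quantileString: String): Array[Double] = {
    try {
      val quantiles = quantileString.split(",").map(_.trim.toDouble)
      require(quantiles.nonEmpty && quantiles.forall(q => q >= 0.0 && q <= 1.0),
        "quantiles must lie in [0.0, 1.0]")
      quantiles
    } catch {
      case _: Exception =>
        throw new WebApplicationException(
          Response.status(Response.Status.BAD_REQUEST)
            .entity("Bad value for parameter quantiles: " + quantileString)
            .build())
    }
  }
}

The handler would then call QuantileParsing.parseQuantiles(quantileString) in place of the bare split/toDouble.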

This file was deleted.
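Separately, the synchronized lookup in listener.stageIdToData is repeated verbatim in stageData, oneAttemptData, and the taskSummary handler of OneStageResource above. A sketch of pulling it into one helper on that class, shown with the imports it would need and assuming StageUIData (the value type of stageIdToData, already consumed by AllStagesResource) is visible from this package:

import org.apache.spark.SparkException
import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.jobs.JobProgressListener
import org.apache.spark.ui.jobs.UIData.StageUIData

// Intended to live inside OneStageResource; each handler would then call
// stageUiData(listener, stageInfo) instead of repeating the synchronized block.
private def stageUiData(
    listener: JobProgressListener,
    stageInfo: StageInfo): StageUIData = {
  listener.synchronized {
    listener.stageIdToData.getOrElse((stageInfo.stageId, stageInfo.attemptId),
      throw new SparkException("failed to get full stage data for stage: " +
        stageInfo.stageId + ":" + stageInfo.attemptId))
  }
}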
