Commit acb7ef6: fix indentation

squito committed Apr 28, 2015
1 parent 7bf1811

Showing 12 changed files with 218 additions and 253 deletions.
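Nearly every hunk below makes the same mechanical fix: multi-line method declarations are reindented so parameters get a double indent (four spaces past the start of the `def`) and the return type moves up onto the line with the closing parenthesis, instead of a dangling `)` on its own line. A few short signatures are collapsed onto a single line instead, one unused import is dropped, and two signatures that would run long keep the return type on a separate line starting with `:`. A minimal before/after sketch of the two styles (the class and method are invented for illustration; only the indentation matters):

// Hypothetical code, not from the commit: the old style vs. the style
// this commit applies across the v1 API resources.
class ExampleResource {

  // Before: parameters indented one step, closing parenthesis dangling
  // on its own line with the return type.
  def summaryOld(
    appId: String,
    jobId: Int
  ): String = {
    s"$appId/$jobId"
  }

  // After: parameters double-indented, return type folded onto the
  // closing-parenthesis line.
  def summaryNew(
      appId: String,
      jobId: Int): String = {
    s"$appId/$jobId"
  }
}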
@@ -30,9 +30,8 @@ private[v1] class AllJobsResource(uiRoot: UIRoot) {

   @GET
   def jobsList(
-    @PathParam("appId") appId: String,
-    @QueryParam("status") statuses: JList[JobExecutionStatus]
-  ): Seq[JobData] = {
+      @PathParam("appId") appId: String,
+      @QueryParam("status") statuses: JList[JobExecutionStatus]): Seq[JobData] = {
     uiRoot.withSparkUI(appId) { ui =>
       val statusToJobs: Seq[(JobExecutionStatus, Seq[JobUIData])] =
         AllJobsResource.getStatusToJobs(ui)
@@ -69,10 +68,9 @@ private[v1] object AllJobsResource {
   }
 
   def convertJobData(
-    job: JobUIData,
-    listener: JobProgressListener,
-    includeStageDetails: Boolean
-  ): JobData = {
+      job: JobUIData,
+      listener: JobProgressListener,
+      includeStageDetails: Boolean): JobData = {
     listener.synchronized {
       val lastStageInfo = listener.stageIdToInfo.get(job.stageIds.max)
       val lastStageData = lastStageInfo.flatMap { s =>

@@ -26,9 +26,7 @@ import org.apache.spark.ui.storage.StorageListener
 private[v1] class AllRDDResource(uiRoot: UIRoot) {
 
   @GET
-  def jobsList(
-    @PathParam("appId") appId: String
-  ): Seq[RDDStorageInfo] = {
+  def jobsList(@PathParam("appId") appId: String): Seq[RDDStorageInfo] = {
     uiRoot.withSparkUI(appId) { ui =>
       val storageStatusList = ui.storageListener.storageStatusList
       val rddInfos = ui.storageListener.rddInfoList
@@ -55,11 +53,10 @@ private[spark] object AllRDDResource {
   }
 
   def getRDDStorageInfo(
-    rddId: Int,
-    rddInfo: RDDInfo,
-    storageStatusList: Seq[StorageStatus],
-    includeDetails: Boolean
-  ): RDDStorageInfo = {
+      rddId: Int,
+      rddInfo: RDDInfo,
+      storageStatusList: Seq[StorageStatus],
+      includeDetails: Boolean): RDDStorageInfo = {
     val workers = storageStatusList.map { (rddId, _) }
     val blockLocations = StorageUtils.getRddBlockLocations(rddId, storageStatusList)
     val blocks = storageStatusList

@@ -22,7 +22,6 @@ import javax.ws.rs.core.MediaType

 import org.apache.spark.executor.{InputMetrics => InternalInputMetrics, OutputMetrics => InternalOutputMetrics, ShuffleReadMetrics => InternalShuffleReadMetrics, ShuffleWriteMetrics => InternalShuffleWriteMetrics, TaskMetrics => InternalTaskMetrics}
 import org.apache.spark.scheduler.{AccumulableInfo => InternalAccumulableInfo, StageInfo}
-import org.apache.spark.status.api._
 import org.apache.spark.ui.SparkUI
 import org.apache.spark.ui.jobs.UIData.{StageUIData, TaskUIData}
 import org.apache.spark.util.Distribution
@@ -32,8 +31,8 @@ private[v1] class AllStagesResource(uiRoot: UIRoot) {

   @GET
   def stageList(
-    @PathParam("appId") appId: String,
-    @QueryParam("status") statuses: JList[StageStatus]
+      @PathParam("appId") appId: String,
+      @QueryParam("status") statuses: JList[StageStatus]
   ): Seq[StageData] = {
     uiRoot.withSparkUI(appId) { ui =>
       val listener = ui.jobProgressListener
@@ -60,11 +59,10 @@ private[v1] class AllStagesResource(uiRoot: UIRoot) {

 private[v1] object AllStagesResource {
   def stageUiToStageData(
-    status: StageStatus,
-    stageInfo: StageInfo,
-    stageUiData: StageUIData,
-    includeDetails: Boolean
-  ): StageData = {
+      status: StageStatus,
+      stageInfo: StageInfo,
+      stageUiData: StageUIData,
+      includeDetails: Boolean): StageData = {
 
     val taskData = if (includeDetails) {
       Some(stageUiData.taskData.map { case (k, v) => k -> convertTaskData(v) } )

@@ -28,10 +28,10 @@ private[v1] class ApplicationListResource(uiRoot: UIRoot) {

   @GET
   def appList(
-    @QueryParam("status") status: JList[ApplicationStatus],
-    @DefaultValue("2010-01-01") @QueryParam("minDate") minDate: SimpleDateParam,
-    @DefaultValue("3000-01-01") @QueryParam("maxDate") maxDate: SimpleDateParam
-  ): Iterator[ApplicationInfo] = {
+      @QueryParam("status") status: JList[ApplicationStatus],
+      @DefaultValue("2010-01-01") @QueryParam("minDate") minDate: SimpleDateParam,
+      @DefaultValue("3000-01-01") @QueryParam("maxDate") maxDate: SimpleDateParam)
+  : Iterator[ApplicationInfo] = {
     val allApps = uiRoot.getApplicationInfoList
     val adjStatus = {
       if (status.isEmpty) {

@@ -25,9 +25,7 @@ import org.apache.spark.ui.exec.ExecutorsPage
 private[v1] class ExecutorListResource(uiRoot: UIRoot) {
 
   @GET
-  def jobsList(
-    @PathParam("appId") appId: String
-  ): Seq[ExecutorSummary] = {
+  def jobsList(@PathParam("appId") appId: String): Seq[ExecutorSummary] = {
     uiRoot.withSparkUI(appId) { ui =>
       val listener = ui.executorsListener
       val storageStatusList = listener.storageStatusList

@@ -52,33 +52,33 @@ private[v1] class JacksonMessageWriter extends MessageBodyWriter[Object]{
   mapper.setDateFormat(JacksonMessageWriter.makeISODateFormat)
 
   override def isWriteable(
-    aClass: Class[_],
-    `type`: Type,
-    annotations: Array[Annotation],
-    mediaType: MediaType): Boolean = {
-      true
+      aClass: Class[_],
+      `type`: Type,
+      annotations: Array[Annotation],
+      mediaType: MediaType): Boolean = {
+    true
   }
 
   override def writeTo(
-    t: Object,
-    aClass: Class[_],
-    `type`: Type,
-    annotations: Array[Annotation],
-    mediaType: MediaType,
-    multivaluedMap: MultivaluedMap[String, AnyRef],
-    outputStream: OutputStream): Unit = {
+      t: Object,
+      aClass: Class[_],
+      `type`: Type,
+      annotations: Array[Annotation],
+      mediaType: MediaType,
+      multivaluedMap: MultivaluedMap[String, AnyRef],
+      outputStream: OutputStream): Unit = {
     t match {
       case ErrorWrapper(err) => outputStream.write(err.getBytes("utf-8"))
       case _ => mapper.writeValue(outputStream, t)
     }
   }
 
   override def getSize(
-    t: Object,
-    aClass: Class[_],
-    `type`: Type,
-    annotations: Array[Annotation],
-    mediaType: MediaType): Long = {
+      t: Object,
+      aClass: Class[_],
+      `type`: Type,
+      annotations: Array[Annotation],
+      mediaType: MediaType): Long = {
     -1L
   }
 }
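One behavior worth noting in the reindented writeTo above: an ErrorWrapper is written out as raw UTF-8 text, while any other object is serialized to JSON through the configured Jackson mapper. A simplified, self-contained sketch of that dispatch (this ErrorWrapper and the mapper setup are stand-ins, not Spark's actual definitions):

import java.io.OutputStream
import com.fasterxml.jackson.databind.ObjectMapper

// Stand-in for Spark's ErrorWrapper: a pre-rendered error message that
// should bypass JSON serialization and reach the client as plain text.
case class ErrorWrapper(err: String)

object WriterSketch {
  // The real writer also configures an ISO date format on the mapper.
  private val mapper = new ObjectMapper()

  def write(t: Object, outputStream: OutputStream): Unit = {
    t match {
      case ErrorWrapper(err) => outputStream.write(err.getBytes("utf-8"))
      case _ => mapper.writeValue(outputStream, t)
    }
  }
}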

@@ -26,10 +26,7 @@ import org.apache.spark.ui.jobs.UIData.JobUIData
 private[v1] class OneJobResource(uiRoot: UIRoot) {
 
   @GET
-  def jobsList(
-    @PathParam("appId") appId: String,
-    @PathParam("jobId") jobId: Int
-  ): JobData = {
+  def jobsList(@PathParam("appId") appId: String, @PathParam("jobId") jobId: Int): JobData = {
     uiRoot.withSparkUI(appId) { ui =>
       val statusToJobs: Seq[(JobExecutionStatus, Seq[JobUIData])] =
         AllJobsResource.getStatusToJobs(ui)

@@ -24,9 +24,8 @@ private[v1] class OneRDDResource(uiRoot: UIRoot) {

   @GET
   def rddData(
-    @PathParam("appId") appId: String,
-    @PathParam("rddId") rddId: Int
-  ): RDDStorageInfo = {
+      @PathParam("appId") appId: String,
+      @PathParam("rddId") rddId: Int): RDDStorageInfo = {
     uiRoot.withSparkUI(appId) { ui =>
       AllRDDResource.getRDDStorageInfo(rddId, ui.storageListener, true).getOrElse(
         throw new NotFoundException(s"no rdd found w/ id $rddId")

@@ -32,9 +32,8 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
   @GET
   @Path("")
   def stageData(
-    @PathParam("appId") appId: String,
-    @PathParam("stageId") stageId: Int
-  ): Seq[StageData] = {
+      @PathParam("appId") appId: String,
+      @PathParam("stageId") stageId: Int): Seq[StageData] = {
     withStage(appId, stageId){ stageAttempts =>
       stageAttempts.map { stage =>
         AllStagesResource.stageUiToStageData(stage.status, stage.info, stage.ui,
@@ -46,10 +45,9 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
   @GET
   @Path("/{attemptId: \\d+}")
   def oneAttemptData(
-    @PathParam("appId") appId: String,
-    @PathParam("stageId") stageId: Int,
-    @PathParam("attemptId") attemptId: Int
-  ): StageData = {
+      @PathParam("appId") appId: String,
+      @PathParam("stageId") stageId: Int,
+      @PathParam("attemptId") attemptId: Int): StageData = {
     withStageAttempt(appId, stageId, attemptId) { stage =>
       AllStagesResource.stageUiToStageData(stage.status, stage.info, stage.ui,
         includeDetails = true)
@@ -59,11 +57,11 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
   @GET
   @Path("/{attemptId: \\d+}/taskSummary")
   def stageData(
-    @PathParam("appId") appId: String,
-    @PathParam("stageId") stageId: Int,
-    @PathParam("attemptId") attemptId: Int,
-    @DefaultValue("0.05,0.25,0.5,0.75,0.95") @QueryParam("quantiles") quantileString: String
-  ): TaskMetricDistributions = {
+      @PathParam("appId") appId: String,
+      @PathParam("stageId") stageId: Int,
+      @PathParam("attemptId") attemptId: Int,
+      @DefaultValue("0.05,0.25,0.5,0.75,0.95") @QueryParam("quantiles") quantileString: String)
+  : TaskMetricDistributions = {
     withStageAttempt(appId, stageId, attemptId) { stage =>
       val quantiles = quantileString.split(",").map { s =>
         try {
@@ -80,13 +78,12 @@ private[v1] class OneStageResource(uiRoot: UIRoot) {
   @GET
   @Path("/{attemptId: \\d+}/taskList")
   def taskList(
-    @PathParam("appId") appId: String,
-    @PathParam("stageId") stageId: Int,
-    @PathParam("attemptId") attemptId: Int,
-    @DefaultValue("0") @QueryParam("offset") offset: Int,
-    @DefaultValue("20") @QueryParam("length") length: Int,
-    @DefaultValue("ID") @QueryParam("sortBy") sortBy: TaskSorting
-  ): Seq[TaskData] = {
+      @PathParam("appId") appId: String,
+      @PathParam("stageId") stageId: Int,
+      @PathParam("attemptId") attemptId: Int,
+      @DefaultValue("0") @QueryParam("offset") offset: Int,
+      @DefaultValue("20") @QueryParam("length") length: Int,
+      @DefaultValue("ID") @QueryParam("sortBy") sortBy: TaskSorting): Seq[TaskData] = {
     withStageAttempt(appId, stageId, attemptId) { stage =>
       val tasks = stage.ui.taskData.values.map{AllStagesResource.convertTaskData}.toIndexedSeq
         .sorted(OneStageResource.ordering(sortBy))
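For context on the taskSummary hunk above: the quantiles query parameter arrives as one comma-separated string (defaulting to "0.05,0.25,0.5,0.75,0.95"), and the handler splits it and parses each piece to a Double inside a try. The diff view cuts off before the catch branch, so the error handling in this sketch is an assumption rather than the commit's actual code:

// Sketch of the quantile parsing visible in the taskSummary hunk.
object QuantileSketch {
  def parseQuantiles(quantileString: String): Array[Double] = {
    quantileString.split(",").map { s =>
      try {
        s.toDouble
      } catch {
        // Hypothetical handling: the real catch branch is truncated in the
        // diff view; presumably it reports the bad query parameter instead.
        case _: NumberFormatException =>
          throw new IllegalArgumentException(s"Bad quantile value: $s")
      }
    }
  }
}

// Example: QuantileSketch.parseQuantiles("0.05,0.5,0.95") yields Array(0.05, 0.5, 0.95).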
[Diff view truncated: the remaining 3 of 12 changed files did not load.]
