Commit

remove println
dmgcodevil committed Feb 24, 2022
1 parent 353572c commit 0a4640b
Showing 1 changed file with 1 addition and 7 deletions.
8 changes: 1 addition & 7 deletions spark/src/main/scala/io/parapet/spark/SparkContext.scala
@@ -132,12 +132,10 @@ object SparkContext {
         // todo replace clientId with workerId in MapResult ?
         case NodeProcess.Req(_ /*workerId*/ , data) => Api(data)
       }))
-      _ <- eval(println("step-1"))
       node <- eval(new NodeProcess[F](nodeRef,
         NodeProcess.Config(id, _address, _clusterServers), nodeInMapper.ref, zmqContext))
       _ <- register(ProcessRef.SystemRef, nodeInMapper)
       _ <- register(ProcessRef.SystemRef, node)
-      _ <- eval(println("step-2"))
       workers <- _workers.map { workerId =>
         val wp = new ClusterWorker[F](workerId, node.ref)
         register(ProcessRef.SystemRef, wp) ++ eval(wp.ref)
@@ -176,12 +174,8 @@ object SparkContext {
 class ClusterWorker[F[_]](id: String, nodeRef: ProcessRef) extends io.parapet.core.Process[F] {
   override val ref: ProcessRef = ProcessRef(id)
 
-  import dsl._
-
   override def handle: Receive = {
-    case cmd: Api =>
-      eval(println(s"send $cmd to $nodeRef worker=$id")) ++
-        NodeProcess.Req(id, cmd.toByteArray) ~> nodeRef
+    case cmd: Api => NodeProcess.Req(id, cmd.toByteArray) ~> nodeRef
  }
 }
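
If the send trace is ever needed again, a level-guarded logger would keep the visibility without the unconditional console output this commit removes. A minimal sketch, assuming SLF4J is available on the classpath (an assumption, not something this commit introduces); the parapet names (Process, Receive, eval, ~>, dsl) are taken from the file above:

import org.slf4j.LoggerFactory

class ClusterWorker[F[_]](id: String, nodeRef: ProcessRef) extends io.parapet.core.Process[F] {
  override val ref: ProcessRef = ProcessRef(id)

  // Replaces the removed println; emits nothing unless debug is enabled.
  private val log = LoggerFactory.getLogger(getClass)

  import dsl._

  override def handle: Receive = {
    case cmd: Api =>
      // Parameterized form defers message formatting until debug is on.
      eval(log.debug("send {} to {} worker={}", cmd, nodeRef, id)) ++
        NodeProcess.Req(id, cmd.toByteArray) ~> nodeRef
  }
}

Unlike println, the trace can then be switched on per deployment through the logging configuration rather than by editing and recompiling the source.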
