diff --git a/examples/src/main/python/streaming/wordcount.pyc b/examples/src/main/python/streaming/wordcount.pyc
new file mode 100644
index 0000000000000..db93702361f47
Binary files /dev/null and b/examples/src/main/python/streaming/wordcount.pyc differ
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonTransformedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonTransformedDStream.scala
index ff70483b771a4..bc07e09ec6d03 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonTransformedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonTransformedDStream.scala
@@ -1,3 +1,5 @@
+/*
+
 package org.apache.spark.streaming.api.python
 
 import org.apache.spark.Accumulator
@@ -10,11 +12,8 @@ import org.apache.spark.streaming.dstream.DStream
 
 import scala.reflect.ClassTag
 
-/**
- * Created by ken on 7/15/14.
- */
 class PythonTransformedDStream[T: ClassTag](
-    parents: Seq[DStream[T]],
+    parent: DStream[T],
     command: Array[Byte],
     envVars: JMap[String, String],
     pythonIncludes: JList[String],
@@ -30,8 +29,14 @@ class PythonTransformedDStream[T: ClassTag](
 
   //pythonDStream compute
   override def compute(validTime: Time): Option[RDD[Array[Byte]]] = {
-    val parentRDDs = parents.map(_.getOrCompute(validTime).orNull).toSeq
-    Some()
+
+//    val parentRDDs = parents.map(_.getOrCompute(validTime).orNull).toSeq
+//    parents.map(_.getOrCompute(validTime).orNull).to
+//    parent = parents.head.asInstanceOf[RDD]
+//    Some()
   }
-  val asJavaDStream = JavaDStream.fromDStream(this)
+
+  val asJavaDStream = JavaDStream.fromDStream(this)
 }
+
+*/
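
The diff above comments out the whole class, leaving compute() unfinished after the switch from a Seq of parents to a single parent DStream. The following is a minimal sketch, not part of the PR, of how the commented-out compute() could be completed for that single-parent shape. The helper runPythonTransform is an assumed placeholder (not a real Spark API) standing in for whatever mechanism pipes the parent RDD and the pickled command through the Python worker, and the constructor parameter list is abbreviated relative to the real class.

// Hypothetical sketch only; runPythonTransform is an assumed placeholder and
// the remaining constructor parameters of the real class are omitted.
package org.apache.spark.streaming.api.python

import java.util.{List => JList, Map => JMap}

import scala.reflect.ClassTag

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Duration, Time}
import org.apache.spark.streaming.api.java.JavaDStream
import org.apache.spark.streaming.dstream.DStream

class PythonTransformedDStream[T: ClassTag](
    parent: DStream[T],
    command: Array[Byte],
    envVars: JMap[String, String],
    pythonIncludes: JList[String])
  extends DStream[Array[Byte]](parent.ssc) {

  override def dependencies: List[DStream[_]] = List(parent)

  override def slideDuration: Duration = parent.slideDuration

  override def compute(validTime: Time): Option[RDD[Array[Byte]]] = {
    // Only produce an output RDD when the parent generated one for this batch.
    parent.getOrCompute(validTime).map { rdd =>
      // Hand the parent RDD plus the pickled command to the Python worker and
      // return the transformed records as serialized byte arrays.
      runPythonTransform(rdd, command, envVars, pythonIncludes)
    }
  }

  // Assumed helper, left unimplemented here: pipes `rdd` through the Python
  // worker that executes `command` and yields the resulting byte-array RDD.
  private def runPythonTransform(
      rdd: RDD[T],
      command: Array[Byte],
      envVars: JMap[String, String],
      pythonIncludes: JList[String]): RDD[Array[Byte]] = ???

  val asJavaDStream: JavaDStream[Array[Byte]] = JavaDStream.fromDStream(this)
}

Using getOrCompute(validTime).map keeps the Option semantics intact: the stream emits an RDD for a batch only when the parent produced one, which avoids the orNull handling that the multi-parent version needed.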