import scala.collection.mutable
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}
// Sample corpus used to drive the test stream.
val lines = Seq("To be or not to be.", "That is the question.")
val rdd = sc.parallelize(lines)

// Queue of RDDs backing the test DStream. Renamed from `lines` to avoid
// redefining the val above (a duplicate-definition compile error outside
// the spark-shell REPL) and so the parallelized `rdd` is actually used.
val rddQueue = mutable.Queue[RDD[String]]()
val streamingContext = new StreamingContext(sc, Seconds(10))
// queueStream consumes one RDD per batch interval from the queue.
val dstream = streamingContext.queueStream(rddQueue)
// append data to DStream
rddQueue += rdd
// Source: http://mkuthan.github.io/blog/2015/03/01/spark-unit-testing/
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}
// Sample corpus used to drive the test stream.
val lines = Seq("To be or not to be.", "That is the question.")
val rdd = sc.parallelize(lines)

// Queue of RDDs backing the test DStream. Renamed from `lines` so it does
// not clash with the Seq above (duplicate val definitions only work line
// by line in the REPL) and so the parallelized `rdd` is actually used.
val rddQueue = mutable.Queue[RDD[String]]()
val streamingContext = new StreamingContext(sc, Seconds(10))
// queueStream consumes one RDD per batch interval from the queue.
val dstream = streamingContext.queueStream(rddQueue)
// append data to DStream
rddQueue += rdd
// Source: http://mkuthan.github.io/blog/2015/03/01/spark-unit-testing/
// (blog-scrape footer, preserved as comments:)
// No comments:
// Post a Comment