Spark Streaming Transformation Operators (2): The Stateful Operator updateStateByKey
- updateStateByKey keeps the state computed in previous batches and merges it with the results of the current batch, so per-key counts accumulate across micro-batches instead of being reset. Because the state must survive across batches, a checkpoint directory has to be set, as in the example below.
```scala
package SparkStreaming.trans

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

object ByUpdateByKey {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[3]").setAppName("transform3")
    val ssc = new StreamingContext(conf, Seconds(3))
    // updateStateByKey requires checkpointing so the key state can be recovered
    ssc.checkpoint("hdfs://node1:9000/sparkstreaming")

    val ds: DStream[String] = ssc.socketTextStream("node1", 44444, StorageLevel.MEMORY_ONLY)
    val ds1: DStream[(String, Int)] = ds.flatMap(_.split(" ")).map((_, 1))

    /*
      The update function is called once per key with two arguments:
        array: the values seen for this key in the current batch
        state: the accumulated state carried over from previous batches
     */
    val ds2 = ds1.updateStateByKey((array: Seq[Int], state: Option[Int]) => {
      var num: Int = state.getOrElse(0)
      for (elem <- array) {
        num += elem
      }
      Option(num)
    })

    ds2.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
```
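To make the per-key update rule concrete, here is a minimal, Spark-free sketch of what the function passed to updateStateByKey computes for a single key. The values below are hypothetical and chosen only for illustration.

```scala
// Hypothetical values for one key, for illustration only.
val currentBatchValues: Seq[Int] = Seq(1, 1, 1)  // the key appeared three times in this batch
val previousState: Option[Int] = Some(2)         // running count carried over from earlier batches

// Same logic as the update function above: previous total plus the current batch's values.
val newState: Option[Int] = Option(previousState.getOrElse(0) + currentBatchValues.sum)
// newState == Some(5); this value is passed back in as the state for the next batch
```

Assuming a socket source is listening on node1 (for example started with `nc -lk 44444`), typing `hello hello` in one batch prints `(hello,2)`, and typing `hello` again in a later batch prints `(hello,3)`, because the previous count is carried forward rather than reset.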
Source: https://www.cnblogs.com/jsqup/p/16649406.html