Big Data Series - SPARK-STREAMING Window Operations on Streaming Data

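The self-contained example below runs a word count over a socket text stream and demonstrates two windowing APIs: `window`, which groups several batches into a fixed-length window, and `reduceByKeyAndWindow`, which maintains the windowed aggregation incrementally using an inverse reduce function.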

package com.test

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

// Window operations on a DStream
object SparkStreamingWindow {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setAppName("SparkStreamingWindow").setMaster("local[*]")
    val streamingContext = new StreamingContext(sparkConf, Seconds(5) /* batch interval */)
    streamingContext.checkpoint("data/cpDir") // checkpointing is required by reduceByKeyAndWindow with an inverse function

    val dstream: ReceiverInputDStream[String] = streamingContext.socketTextStream("localhost", 8600)
    val wordToMap = dstream.map((_, 1))
    // The window duration must be a whole multiple of the batch interval, e.g. 10 = 5 * 2.
    // If the slide interval is omitted it defaults to the batch interval, so windows overlap;
    // here the slide equals the window duration, giving non-overlapping (tumbling) windows.
    val windowDStream: DStream[(String, Int)] = wordToMap.window(Seconds(10) /* window duration */ , Seconds(10) /* slide interval */)
    windowDStream.reduceByKey(_ + _).print()

    // When the window duration exceeds the slide interval, consecutive windows overlap;
    // an inverse reduce function lets Spark update the previous window's result
    // incrementally instead of recomputing the overlapping batches.
    wordToMap.reduceByKeyAndWindow(
      (x: Int, y: Int) => { // reduce: add values entering the window
        x + y
      },
      (x: Int, y: Int) => { // inverse reduce: subtract values leaving the window
        x - y
      },
      Seconds(10) /* window duration */ ,
      Seconds(5) /* slide interval */
    ).print()
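
    // Worked example of the incremental update: with a 10s window sliding every 5s,
    // windows [0s,10s) and [5s,15s) share the [5s,10s) batch. Spark computes
    //   sum([5s,15s)) = sum([0s,10s)) + sum([10s,15s)) - sum([0s,5s))
    // so each batch is reduced once, not once per window it falls into.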

    streamingContext.start()
    streamingContext.awaitTermination()

  }

}
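
To try this locally (a sketch, assuming netcat is installed and port 8600 is free), start a socket source with `nc -lk 8600` before launching the object above, then type words into the netcat session. Batches are collected every 5 seconds: the first `print` emits one count per 10-second tumbling window, while the `reduceByKeyAndWindow` output slides every 5 seconds, so each word is reflected in two consecutive windows.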
