SplitStreamTest
(Split-stream test: routing one stream into several via side outputs)
package com.liao.chapter08
import com.liao.chapter05.{ClickSource, Event}
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector
object SplitStreamTest {

  // Side-output tags used to route Mary's and Bob's events out of the main stream.
  // Declared as `val` (they are never reassigned) with explicit types, since they
  // are public members of the object.
  val maryTag: OutputTag[(String, String, Long)] = OutputTag[(String, String, Long)]("mary-tag")
  val bobTag: OutputTag[(String, String, Long)] = OutputTag[(String, String, Long)]("bob-tag")

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    val stream = env.addSource(new ClickSource)

    // Split the stream in a single pass with a ProcessFunction and side outputs.
    // (The previous version also built three separate `filter` streams that were
    // never connected to a sink — each event was evaluated three extra times for
    // nothing; those dead transformations have been removed.)
    val elseStream = stream.process(new ProcessFunction[Event, Event] {
      override def processElement(event: Event,
                                  ctx: ProcessFunction[Event, Event]#Context,
                                  out: Collector[Event]): Unit = {
        event.user match {
          case "Mary" => ctx.output(maryTag, (event.user, event.url, event.timestamp))
          case "Bob"  => ctx.output(bobTag, (event.user, event.url, event.timestamp))
          case _      => out.collect(event) // everyone else stays on the main stream
        }
      }
    })

    elseStream.print("else")
    elseStream.getSideOutput(maryTag).print("mary")
    elseStream.getSideOutput(bobTag).print("bob")

    env.execute()
  }
}
UnionTest
package com.liao.chapter08
import com.liao.chapter05.Event
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.ProcessFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector
import sun.plugin2.message.EventMessage
object UnionTest {

  /** Parses a comma-separated line "user,url,timestamp" into an Event.
    * Extracted so the identical mapping logic is not duplicated for each socket
    * stream. NOTE(review): assumes every line has at least three fields and a
    * numeric timestamp — a malformed line will throw; confirm input guarantees.
    */
  private def parseEvent(line: String): Event = {
    val fields = line.split(",")
    Event(fields(0).trim, fields(1).trim, fields(2).trim.toLong)
  }

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Two independent socket sources with the same element type; ascending
    // timestamps are assumed per stream for watermark generation.
    val stream1 = env.socketTextStream("hadoop002", 7777)
      .map(parseEvent _)
      .assignAscendingTimestamps(_.timestamp)

    val stream2 = env.socketTextStream("hadoop002", 8888)
      .map(parseEvent _)
      .assignAscendingTimestamps(_.timestamp)

    // After union, the watermark of the combined stream is the minimum of the
    // input streams' watermarks; print it for each element to observe that.
    stream1.union(stream2)
      .process(new ProcessFunction[Event, String] {
        override def processElement(event: Event,
                                    ctx: ProcessFunction[Event, String]#Context,
                                    out: Collector[String]): Unit = {
          out.collect(s"当前水位线:${ctx.timerService().currentWatermark()}")
        }
      })
      .print()

    env.execute()
  }
}