文章目录
apply
/**
 * Flink streaming job: reads "id,timestamp,temperature" CSV records from the
 * Kafka topic "sensor", assigns event-time timestamps with a 2-second
 * bounded-out-of-orderness watermark, and for each sensor id emits one
 * {@code Tuple4<id, windowStart, windowEnd, elementCount>} per 15-second
 * tumbling event-time window, printed under the "apply" tag.
 */
public class WindowApply {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);
        // Windows below are driven by event time extracted from the records.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"192.168.232.211:9092");
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG,"group_1");
        // NOTE(review): the Flink Kafka connector overrides key/value deserializers
        // (SimpleStringSchema below does the actual deserialization); these two
        // properties are effectively ignored but kept for behavior parity — confirm
        // before removing.
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"latest");

        DataStreamSource<String> inputStream = env.addSource(new FlinkKafkaConsumer011<String>("sensor",
                new SimpleStringSchema(),
                prop));

        // Parse each CSV line into a SensorReading, then assign timestamps and
        // watermarks tolerating up to 2 seconds of out-of-order events.
        SingleOutputStreamOperator<SensorReading> mapStream = inputStream.map(new MapFunction<String, SensorReading>() {
            @Override
            public SensorReading map(String s) throws Exception {
                String[] split = s.split(",");
                return new SensorReading(split[0], Long.parseLong(split[1]), Double.parseDouble(split[2]));
            }
        }).assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<SensorReading>(Time.seconds(2)) {
            @Override
            public long extractTimestamp(SensorReading sensorReading) {
                // Source timestamps are in seconds; Flink expects epoch milliseconds.
                return sensorReading.getTimestamp() * 1000L;
            }
        });
        mapStream.print("inputStream");

        SingleOutputStreamOperator<Tuple4<String, Long, Long, Integer>> id = mapStream.keyBy("id")
                .timeWindow(Time.seconds(15))
                .apply(new WindowFunction<SensorReading, Tuple4<String, Long, Long, Integer>, Tuple, TimeWindow>() {
                    // Invoked once per key each time a 15s tumbling window fires.
                    @Override
                    public void apply(Tuple tuple,
                                      TimeWindow window,
                                      Iterable<SensorReading> input,
                                      Collector<Tuple4<String, Long, Long, Integer>> out) throws Exception {
                        String key = tuple.getField(0); // key, e.g. sensor_1
                        long start = window.getStart();
                        long end = window.getEnd();
                        // Count elements directly instead of materializing them into a
                        // list via IteratorUtils — O(1) extra memory and no
                        // commons-collections dependency.
                        int size = 0;
                        for (SensorReading ignored : input) {
                            size++;
                        }
                        out.collect(new Tuple4<>(key, start, end, size));
                    }
                });
        id.print("apply");

        env.execute("apply");
    }
}