package com.suning.pcdnas.flink.job;
import com.alibaba.fastjson.JSON;
import com.suning.pcdnas.flink.model.BandwidthTime;
import com.suning.pcdnas.flink.model.GatherInfo;
import com.suning.pcdnas.flink.operate.BandwidthDomainAreaSplitter;
import com.suning.pcdnas.flink.operate.BandwidthDomainMonitorSplitter;
import com.suning.pcdnas.flink.operate.BandwidthDomainSplitter;
import com.suning.pcdnas.flink.operate.BandwidthDomainUserSplitter;
import com.suning.pcdnas.flink.operate.CustomWatermarkMark;
import com.suning.pcdnas.flink.sinks.MysqlDomainAreaSinks;
import com.suning.pcdnas.flink.sinks.MysqlDomainMonitorSinks;
import com.suning.pcdnas.flink.sinks.MysqlDomainSinks;
import com.suning.pcdnas.flink.sinks.MysqlDomainUserSinks;
import com.suning.pcdnas.flink.utils.ConfigUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;
import org.apache.flink.util.Collector;
import java.math.BigDecimal;
import java.util.Properties;
/**
 * Peer Flink real-time analysis task: 5-minute domain bandwidth and
 * 5-minute domain bandwidth broken down by region and ISP.
 */
public class PeerFlinkTask {

    private PeerFlinkTask() {
    }
    public static void main(String[] args) throws Exception {
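        // Load the job configuration (Kafka endpoints, consumer group/topic, MySQL
        // data source) and abort early rather than starting with null settings.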
        Properties config = ConfigUtil.getConfig();
        if (config == null || config.isEmpty()) {
            System.exit(1);
        }
        final String servers = config.getProperty("kafka.borkers");
        final String zookeeper = config.getProperty("kafka.zklist");
        final String groupId = config.getProperty("kafka.peer.groupid");
        final String topic = config.getProperty("kafka.peer.topic");
        final String dataSourceName = config.getProperty("sql.datasource");
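        // Checkpoint every 5 seconds and window on event time rather than processing time.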
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
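        // The Kafka 0.8 consumer needs both the broker list and the ZooKeeper quorum.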
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", servers);
        properties.setProperty("zookeeper.connect", zookeeper);
        properties.setProperty("group.id", groupId);
        FlinkKafkaConsumer08<String> consumer = new FlinkKafkaConsumer08<>(topic, new SimpleStringSchema(), properties);
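        // Deserialize each Kafka record from JSON into a GatherInfo and assign
        // event-time timestamps/watermarks so the 5-minute windows can fire.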
        DataStream<GatherInfo> sourceStream = env.addSource(consumer)
                .map(new MapFunction<String, GatherInfo>() {
                    @Override
                    public GatherInfo map(String content) throws Exception {
                        return JSON.parseObject(content, GatherInfo.class);
                    }
                })
                .assignTimestampsAndWatermarks(new CustomWatermarkMark());
        /*
        DataStream<Tuple2<String, BandwidthTime>> results1 = sourceStream
                .flatMap(new BandwidthDomainSplitter()).keyBy(0).timeWindow(Time.minutes(5))
                .apply(new SumBandwidthWindow());
        DataStream<Tuple2<String, BandwidthTime>> results2 = sourceStream
                .flatMap(new BandwidthDomainAreaSplitter()).keyBy(0).timeWindow(Time.minutes(5))
                .apply(new SumBandwidthWindow());
        */
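        // Domain/monitor dimension: sum bandwidth per key over 5-minute tumbling windows.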
        DataStream<Tuple2<String, BandwidthTime>> sourceBandwidthDomainMonitor = sourceStream
                .flatMap(new BandwidthDomainMonitorSplitter()).keyBy(0).timeWindow(Time.minutes(5))
                .apply(new SumBandwidthWindow());
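        // Domain/user dimension: the same 5-minute aggregation keyed by the user splitter's key.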
        DataStream<Tuple2<String, BandwidthTime>> sourceBandwidthDomainUser = sourceStream
                .flatMap(new BandwidthDomainUserSplitter()).keyBy(0).timeWindow(Time.minutes(5))
                .apply(new SumBandwidthWindow());
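        // Persist the 5-minute aggregates to MySQL.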
        sourceBandwidthDomainMonitor.addSink(new MysqlDomainMonitorSinks(dataSourceName));
        sourceBandwidthDomainUser.addSink(new MysqlDomainUserSinks(dataSourceName));
        /*
        results1.addSink(new MysqlDomainSinks(dataSourceName));
        results2.addSink(new MysqlDomainAreaSinks(dataSourceName));
        */
        env.execute("Flink Streaming PeerFlinkTask");
    }
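
    /**
     * Shared window function for all per-dimension pipelines: sums the Double
     * bandwidth samples of one key within a window and emits the total together
     * with the window end timestamp. The key is captured while iterating so the
     * window contents are traversed only once.
     */
    private static class SumBandwidthWindow
            implements WindowFunction<Tuple2<String, Double>, Tuple2<String, BandwidthTime>, Tuple, TimeWindow> {
        @Override
        public void apply(Tuple tuple, TimeWindow timeWindow, Iterable<Tuple2<String, Double>> input,
                          Collector<Tuple2<String, BandwidthTime>> collector) throws Exception {
            String key = null;
            // Accumulate in BigDecimal to avoid floating-point drift across many samples.
            BigDecimal total = BigDecimal.ZERO;
            for (Tuple2<String, Double> record : input) {
                if (key == null) {
                    key = record.f0;
                }
                total = total.add(BigDecimal.valueOf(record.f1));
            }
            collector.collect(new Tuple2<>(key, new BandwidthTime(total.doubleValue(), timeWindow.getEnd())));
        }
    }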
}