Flink example: computing the average transaction amount per account over 30-second event-time windows.

package com.zetyun.streaming.flink;

import org.apache.flink.api.common.functions.MapFunction;

import org.apache.flink.api.java.tuple.Tuple;

import org.apache.flink.api.java.tuple.Tuple2;

import org.apache.flink.api.java.tuple.Tuple3;

import org.apache.flink.api.java.utils.ParameterTool;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;

import org.apache.flink.streaming.api.TimeCharacteristic;

import org.apache.flink.streaming.api.datastream.KeyedStream;

import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;

import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;

import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;

import org.apache.flink.streaming.api.functions.windowing.WindowFunction;

import org.apache.flink.streaming.api.watermark.Watermark;

import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;

import org.apache.flink.streaming.api.windowing.time.Time;

import org.apache.flink.streaming.api.windowing.windows.TimeWindow;

import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;

import org.apache.flink.streaming.util.serialization.JSONDeserializationSchema;

import org.apache.flink.util.Collector;

import javax.annotation.Nullable;

import java.text.ParseException;

import java.text.SimpleDateFormat;

import java.util.Date;

import java.util.Iterator;

import java.util.Properties;

/**

* Created by jyt on 2018/4/10.

* Computes the average transaction amount per account over tumbling 30-second event-time windows.

*/

public class EventTimeAverage {

public static void main(String[] args) throws Exception {

final ParameterTool parameterTool = ParameterTool.fromArgs(args);

String topic = parameterTool.get("topic", "accountId-avg");

Properties properties = parameterTool.getProperties();

properties.setProperty("bootstrap.servers", "192.168.44.101:9092");

final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

ObjectMapper objectMapper = new ObjectMapper();

SingleOutputStreamOperator source = env.addSource(new FlinkKafkaConsumer010(

topic,

new JSONDeserializationSchema(),

properties));

//设置WaterMarks方式一

/*SingleOutputStreamOperator objectNodeOperator = source.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor(Time.seconds(15)) {

@Override

public long extractTimestamp(ObjectNode element) {

SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");

Date eventTime = null;

try {

eventTime = format.parse(element.get("eventTime").asText());

} catch (ParseException e) {

e.printStackTrace();

}

return eventTime.getTime();

}

});*/

//设置WaterMarks方式二

SingleOutputStreamOperator objectNodeOperator = source.assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks() {

public long currentMaxTimestamp = 0L;

public static final long maxOutOfOrderness = 10000L;//最大允许的乱序时间是10s

Watermark a = null;

SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");

@Nullable

@Override

public Watermark getCurrentWatermark() {

a = new Watermark(currentMaxTimestamp - maxOutOfOrderness);

return a;

}

@Override

public long extractTimestamp(ObjectNode jsonNodes, long l) {

String time = jsonNodes.get("eventTime").asText();

long timestamp = 0;

try {

timestamp = format.parse(time).getTime();

} catch (ParseException e) {

e.printStackTrace();

}

currentMaxTimestamp = Math.max(timestamp, currentMaxTimestamp);

return timestamp;

}

});

KeyedStream, Tuple> keyBy = objectNodeOperator.map(new MapFunction>() {

@Override

public Tuple3 map(ObjectNode jsonNodes) throws Exception {

System.out.println(jsonNodes.get("accountId").asText() + "==map====" + jsonNodes.get("amount").asDouble() + "===map===" + jsonNodes.get("eventTime").asText());

return new Tuple3(jsonNodes.get("accountId").asText(), jsonNodes.get("amount").asDouble(), jsonNodes.get("eventTime").asText());

}

}).keyBy(0);

SingleOutputStreamOperator apply = keyBy.window(TumblingEventTimeWindows.of(Time.seconds(30))).apply(new WindowFunction, Object, Tuple, TimeWindow>() {

@Override

public void apply(Tuple tuple, TimeWindow timeWindow, Iterable> iterable, Collector collector) throws Exception {

Iterator> iterator = iterable.iterator();

int count =0;

double num = 0.0;

///Tuple2 result = null;

Tuple3 next = null;

String accountId = null ;

while (iterator.hasNext()) {

next = iterator.next();

System.out.println(next);

accountId=next.f0;

num += next.f1;

count++;

}

if (next != null) {

collector.collect(new Tuple2(accountId,num/count));

}

}

});

apply.print();

//apply.addSink(new FlinkKafkaProducer010("192.168.44.101:9092","wiki-result",new SimpleStringSchema()));

env.execute("AverageDemo");

}

}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值