Interface_kafka_UserTransmit

Spring Boot interface

com.test.MainApplication

package com.test;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class MainApplication  {
    public static void main(String[] args) {
        SpringApplication.run(MainApplication.class,args);
    }
}

com.test.controller.AvroTest

package com.test.controller;

import com.test.util.kafka_utils.KafkaUtils;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.web.bind.annotation.*;

@RestController
public class AvroTest {
    @PostMapping("/send)
    public String getAvroInfo(@RequestBody String avroInfo){
        System.out.println(avroInfo);
        // Forward the request body to the "user_message" Kafka topic.
        Producer<String, String> producer = KafkaUtils.getKafkaProducer();
        producer.send(new ProducerRecord<String, String>("user_message", avroInfo));

        return "接收post请求的数据
    }
}
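KafkaUtils is used by both the controller above and the Flink job below, but its implementation is not included in the post. The sketch below is one plausible version, assuming String serialization on both sides; the broker address (reusing the 192.168.1.162 host from the HDFS URL later in the post), the consumer group id, and the property values are all assumptions, not the author's actual configuration.

package com.test.util.kafka_utils;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;

import java.util.Properties;

// Hypothetical helper; the real KafkaUtils is not shown in the post.
public class KafkaUtils {
    // Assumed broker address (same host as the HDFS NameNode below).
    private static final String BOOTSTRAP_SERVERS = "192.168.1.162:9092";

    // Plain Kafka producer with String key/value serializers.
    public static Producer<String, String> getKafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return new KafkaProducer<>(props);
    }

    // Flink Kafka source that deserializes each record as a String.
    public static FlinkKafkaConsumer<String> getFlinkKafkaConsumer(String topic) {
        Properties props = new Properties();
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("group.id", "user_transmit_group"); // assumed consumer group
        return new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), props);
    }
}

With the application running, POSTing a body such as user1,Tom,1650000000 to /send should land one record on the user_message topic for the Flink job to consume.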

com.test.app.UserTransmit

package com.test.app;

import com.test.bean.User;
import com.test.util.hdfsSink.HdfsUtils;
import com.test.util.kafka_utils.KafkaUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

public class UserTransmit {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.setParallelism(1);
        SingleOutputStreamOperator<User> userDS = env.addSource(KafkaUtils.getFlinkKafkaConsumer("user_message"))
                .map(new MapFunction<String, User>() {

                    @Override
                    public User map(String s) throws Exception {
                        String[] split = s.split(",");
                        return new User(split[0], split[1], Long.valueOf(split[2])*1L);
                    }
                });
        // Event-time watermark assignment (disabled; the job currently runs on processing time):
//        SingleOutputStreamOperator user_water_ds = userDS.assignTimestampsAndWatermarks(
//                WatermarkStrategy.
//                        <User>forBoundedOutOfOrderness(Duration.ofSeconds(10))
//                        .withTimestampAssigner((data, ts) -> data.getTransmit_time() * 1000L)
//        );
        SingleOutputStreamOperator<Tuple2<String,User>> tuple_user_ds = userDS.map(new MapFunction<User, Tuple2<String, User>>() {

            @Override
            public Tuple2<String, User> map(User user) throws Exception {
                return Tuple2.of(user.getUserid(), user);
            }
        });
        SingleOutputStreamOperator<Tuple2<String, Long>> resultDS = tuple_user_ds
                .keyBy(value -> value.f0)
                .timeWindow(Time.days(1)) // one-day tumbling window; processing time, since no watermarks are assigned
                .process(new ProcessWindowFunction<Tuple2<String, User>, Tuple2<String, Long>, String, TimeWindow>() {

                    @Override
                    public void process(String key, Context context, Iterable<Tuple2<String, User>> elements, Collector<Tuple2<String, Long>> out) throws Exception {
                        // Count this key's records in the window; a local counter avoids
                        // state leaking across keys and windows.
                        long sumTransmit = 0L;
                        for (Tuple2<String, User> ignored : elements) {
                            sumTransmit++;
                        }
                        out.collect(Tuple2.of(key, sumTransmit));
                    }
                });
        resultDS.addSink(new HdfsUtils<Tuple2<String, Long>>().getTBucketingSink("hdfs://192.168.1.162:8020/UserData/"));

        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
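The User bean and HdfsUtils are likewise project-local classes the post does not show. Below is a hypothetical reconstruction inferred from how UserTransmit uses them: the bean's second field name is a guess, and HdfsUtils is assumed to wrap Flink's BucketingSink from flink-connector-filesystem.

package com.test.bean;

// Hypothetical POJO inferred from UserTransmit's usage; the original bean is not shown.
public class User {
    private String userid;
    private String name;          // assumed name for the second CSV field
    private Long transmit_time;   // epoch seconds, per the commented watermark code (* 1000L)

    // Flink POJOs need a public no-arg constructor plus getters/setters.
    public User() {}

    public User(String userid, String name, Long transmit_time) {
        this.userid = userid;
        this.name = name;
        this.transmit_time = transmit_time;
    }

    public String getUserid() { return userid; }
    public void setUserid(String userid) { this.userid = userid; }
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public Long getTransmit_time() { return transmit_time; }
    public void setTransmit_time(Long transmit_time) { this.transmit_time = transmit_time; }
}

And a minimal sketch of the sink helper, assuming it simply builds a BucketingSink for the given HDFS path:

package com.test.util.hdfsSink;

import org.apache.flink.streaming.connectors.fs.bucketing.BucketingSink;

// Hypothetical wrapper; the real HdfsUtils is not shown in the post.
public class HdfsUtils<T> {
    // Returns a BucketingSink (flink-connector-filesystem) writing to the given HDFS path.
    public BucketingSink<T> getTBucketingSink(String path) {
        return new BucketingSink<>(path);
    }
}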
