Integrating Flink and MyBatis with Spring Boot

Flink has its own stream-processing model and its types cannot be converted arbitrarily. The MyBatis integration described here is therefore only a partial one, and I have not yet found a good approach for integrating MyBatis-Plus.

On to the code.

For now I am only recording the call flow of a single interface layer.

entity:

import com.baomidou.mybatisplus.annotation.TableName;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import org.springframework.stereotype.Component;

import java.io.Serializable;

/**
 * <p>
 * description: Student entity class
 * </p>
 *
 * @author Bruce 2021/12/02 13:43
 * @program demo
 */
@Component
@Data
@ToString
//@NoArgsConstructor
//@AllArgsConstructor
@TableName("t_student")
public class Student implements Serializable {


    private static final long serialVersionUID = 5768417941637790054L;
    private String id;
    private String name;
    private Integer age;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getAge() {
        return age;
    }

    public void setAge(Integer age) {
        this.age = age;
    }

    public Student(String id, String name, Integer age) {
        this.id = id;
        this.name = name;
        this.age = age;
    }

    public Student() {
    }

    @Override
    public String toString() {
        return "Student{" +
                "id='" + id + '\'' +
                ", name='" + name + '\'' +
                ", age=" + age +
                '}';
    }
}

DAO:

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.founder.bigdata.compute.demo.bean.Student;
import org.apache.ibatis.annotations.Param;
import org.springframework.stereotype.Repository;


@Repository
public interface StudentMapper extends BaseMapper<Student> {

    Student selectStudents(@Param("id") String id);

    int addStudent(Student student);

}
mapper XML:

<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.founder.bigdata.compute.demo.mapper.StudentMapper">
    <select id="selectStudents" resultType="com.founder.bigdata.compute.demo.bean.Student">
        select id id,`name` name,age age
        from t_student
        where id = #{id}
        limit 1
    </select>

    <insert id="addStudent" parameterType="com.founder.bigdata.compute.demo.bean.Student">
        insert into t_student(
            id,
            name,
            age
        )
        values(
            #{id},
            #{name},
            #{age}
        )
    </insert>

</mapper>

service:

import com.founder.bigdata.compute.demo.bean.Student;
import com.founder.bigdata.compute.demo.mapper.StudentMapper;
import com.founder.bigdata.compute.demo.utils.MybatisSessionFactory;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;

public class StudentDBUtils extends RichSinkFunction<Student> {
    // Flink serializes and instantiates this function itself, so the @Autowired annotation below
    // never takes effect; the mapper is obtained from a MyBatis SqlSession in open() instead.
    SqlSession sqlSession = null;
    @Autowired
    StudentMapper mapper;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        this.sqlSession = MybatisSessionFactory.getSqlSessionFactory().openSession();
        this.mapper = sqlSession.getMapper(StudentMapper.class);
    }

    @Override
    public void close() throws Exception {
        if (this.sqlSession != null)
            this.sqlSession.close();
    }

    @Override
    public void invoke(Student value, Context context) throws Exception {
        System.out.println(value);
        Student student = this.mapper.selectById(value.getId());
        if (student == null) {
            this.mapper.insert(value);
        } else {
            this.mapper.updateById(value);
        }
        this.sqlSession.commit();
    }

}
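The sink above obtains its mapper from a MybatisSessionFactory utility whose source is not shown in this post. A minimal sketch of what it might look like, assuming the MyBatis configuration lives in a mybatis-config.xml file on the classpath (the file name is an assumption):

import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;

import java.io.InputStream;

public class MybatisSessionFactory {

    private static SqlSessionFactory sqlSessionFactory;

    private MybatisSessionFactory() {
    }

    // Lazily build a single SqlSessionFactory from the MyBatis XML configuration.
    // "mybatis-config.xml" is an assumed file name; adjust it to the actual project layout.
    public static synchronized SqlSessionFactory getSqlSessionFactory() {
        if (sqlSessionFactory == null) {
            try (InputStream in = Resources.getResourceAsStream("mybatis-config.xml")) {
                sqlSessionFactory = new SqlSessionFactoryBuilder().build(in);
            } catch (Exception e) {
                throw new RuntimeException("Failed to build SqlSessionFactory", e);
            }
        }
        return sqlSessionFactory;
    }
}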
service interface:

import com.founder.bigdata.compute.demo.bean.Student;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.List;

/**
 * <p>
 * description:
 * </p>
 *
 * @author Bruce 2021/12/05 9:03
 * @program demo
 */
public interface TestFlinkS {
    /**
     * Test interface
     */
    void test() throws Exception;

    void TestNCConsumer(StreamExecutionEnvironment env);

    List<Student> getStudentList(String id);

    int addST() throws Exception;

}

service implementation:

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.founder.bigdata.compute.demo.bean.Student;
import com.founder.bigdata.compute.demo.dao.TestFlinkS;
import com.founder.bigdata.compute.demo.mapper.StudentMapper;
import com.founder.bigdata.compute.demo.utils.MySQLSource;
import com.founder.bigdata.compute.demo.utils.MySQLSourceCopy;
import org.apache.commons.compress.utils.Lists;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.AggregateOperator;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.api.java.operators.PartitionOperator;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.stream.Collectors;


/**
 * <p>
 * description:
 * </p>
 *
 * @author Bruce 2021/12/05 9:12
 * @program demo
 */
@Service
public class TestFlinkImpl implements TestFlinkS {

    @Autowired
    private StudentMapper studentMapper;

    @Override
    public void test() {
        System.out.println("=========  Start  >>>>>>>>>  Flink job demo  <<<<<<<<<  =========");
        // 1. Prepare the environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // 2. Prepare the data
        DataStream<Student> studentDataStreamSource = env.addSource(new MySQLSource());



        studentDataStreamSource.print();

        // 3. Transform the data (deduplication)
        SingleOutputStreamOperator<String> outputStreamOperator = studentDataStreamSource.keyBy(Student::getName).process(new DataDistinct());



        // 4. Print the result
        outputStreamOperator.print();
        try {
            // 5. Trigger execution
            env.execute();
        } catch (Exception e) {
            System.out.println("Error executing flink job: " + e.getMessage());
        }
        System.out.println("****** Demo finished ******");
    }

    @Override
    public void TestNCConsumer(StreamExecutionEnvironment env) {

        int port = 9000;
//        DataStreamSource<String> text = env.socketTextStream("127.0.0.1", port);
        DataStreamSource<String> text = env.socketTextStream("192.168.56.1", port);
        // Read input data from the socket connection
        DataStream<Student> studentDataStreamSource = env.addSource(new MySQLSourceCopy());

        studentDataStreamSource.print();
//        try {
//            // Insert the data into the database
//            text.flatMap(new FlatMapFunction<String, String>() {
//                public void flatMap(String value, Collector<String> out) throws Exception {
//                    String[] splits = value.split("\\s");
//                    int i = 0;
//                    for (String word:splits) {
//                        out.collect(new Student(word, 1L));
//
//                        QueryWrapper<Student> qw = new QueryWrapper<>();
//                        qw.eq("name",word);
//                        List<Student> students = studentMapper.selectList(qw);
//                        if (students==null){
//                            Student student = new Student();
//                            student.setId(i+++"");
//                            student.setAge(i++);
//                            student.setName(word);
//                            studentMapper.insert(student);
//                        }
//                    }
//                }
//            });
//        }catch (Exception e){
//            e.printStackTrace();
//            System.out.println("No data!");
//        }finally {
//            System.out.println("Goodbye!");
//        }



    }

    @Override
    public List<Student> getStudentList(String id) {
        // Prepare the stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        QueryWrapper<Student> qw = new QueryWrapper<>();
        qw.eq("id",id);
        List<Student> students = this.studentMapper.selectList(qw);
        List<String> ids = students.stream().map(Student::getId).collect(Collectors.toList());
//        System.out.println(ids.toString());
        DataStream<String> dataStream = env.fromCollection(Arrays.asList("1","2"));

//        DataStream<Student> dataStream = env.fromCollection(students);
//        dataStream.flatMap(new FlatMapFunction<Student, Student>() {
//            @Override
//            public void flatMap(Student student, Collector<Student> collector) throws Exception {
//                int i = 0;
//                for (Student s : students) {
//                    System.out.println(s.getName());
//                    s.setName(s.getName()+""+i++);
//                    System.out.println(s.getName());
//                    i += 1;
//                    collector.collect(s);
//                }
//
//            }
//        });
//        dataStream.map(new MapFunction<Student, Object>() {
//            @Override
//            public Object map(Student student) throws Exception {
//                return student;
//            }
//        });

        // Note: without env.execute() this print() sink is never actually triggered.
        dataStream.print();


        return students;
    }

    @Override
    public int addST() throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        String inputPath = "D:\\xunLeiDownLoad\\1.txt";
        Random r = new Random();
        int i = r.nextInt(1000);
        String outPath = "D:\\xunLeiDownLoad\\"+i+".txt";
        DataSource<String> source = env.readTextFile(inputPath);


        ArrayList<Student> res = Lists.newArrayList();


        MapOperator<String, Student> parallelism = source.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public void flatMap(String s, Collector<String> out) throws Exception {
                String[] f = s.split(" ");
                for (String s1 : f) {
                    out.collect(s1);
                }
            }
        }).map(new MapFunction<String, Student>() {
            @Override
            public Student map(String s) throws Exception {
                Student student = new Student();
                student.setName(s);
                student.setId(i + "");
                student.setAge(i);
//                res.add(student);
//                studentMapper.addStudent(student);
                return student;
            }
        }).setParallelism(1);


        // res is only populated if the commented-out res.add(student) call above is re-enabled,
        // so this loop currently inserts nothing.
        res.parallelStream().forEach(re -> {
            this.studentMapper.insert(re);
        });

//        parallelism.writeAsCsv(outPath);
        parallelism.print();

        System.out.println("=============================================");

        List<Student> students = this.studentMapper.selectList(new QueryWrapper<>());
        System.out.println(students.toString());
        return 1;
    }

    public void getStudentFiledDistinct(Integer type,DataStream<Student> studentDataStreamSource) {
        // 1. Prepare the environment
        // Note: studentDataStreamSource belongs to the caller's environment; executing this newly
        // created environment will not run that pipeline, so the caller's env should be passed in.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 3. Transform the data (deduplication)
        if (type == 0) {
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("id").process(new DataDistinct());
            // 4. Print the result
            outputStreamOperator.print();
        } else if (type == 1) {
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("name").process(new DataDistinct());
            outputStreamOperator.print();
        } else if (type == 2) {
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("age").process(new DataDistinct());
            outputStreamOperator.print();
        } else if (type == 3) {
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("id", "name").process(new DataDistinct());
            outputStreamOperator.print();
        } else if (type == 4) {
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("id", "age").process(new DataDistinct());
            outputStreamOperator.print();
        } else if (type == 5) {
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("name", "age").process(new DataDistinct());
            outputStreamOperator.print();
        } else{
            DataStream<String> outputStreamOperator = studentDataStreamSource.keyBy("id", "age", "name").process(new DataDistinct());
            outputStreamOperator.print();
        }
        try {
            // 5. Trigger execution
            env.execute();
        } catch (Exception e) {
            System.out.println("Error executing flink job: " + e.getMessage());
        }
    }
}
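The deduplication steps above call a DataDistinct process function that is not listed in this post. A minimal sketch of how such a function could look, assuming it keeps a per-key "seen" flag in ValueState and emits each student only the first time its key appears (this version matches keyBy(Student::getName), which keys by String; the field-name keyBy variants would need a Tuple key type instead):

import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

public class DataDistinct extends KeyedProcessFunction<String, Student, String> {

    // One boolean flag per key; null means the key has not been seen yet.
    private transient ValueState<Boolean> seen;

    @Override
    public void open(Configuration parameters) throws Exception {
        seen = getRuntimeContext().getState(new ValueStateDescriptor<>("seen", Boolean.class));
    }

    @Override
    public void processElement(Student value, Context ctx, Collector<String> out) throws Exception {
        if (seen.value() == null) {
            // First time this key appears: remember it and emit the record.
            seen.update(true);
            out.collect(value.toString());
        }
        // Later records with the same key are dropped.
    }
}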

JDBC source:

import com.founder.bigdata.compute.demo.bean.Student;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

/**
 * <p>
 * description:
 * </p>
 *
 * @author Bruce 2021/12/02 15:17
 * @program demo
 */
public class MySQLSource extends RichParallelSourceFunction<Student> {

    private Connection conn = null;
    private PreparedStatement ps = null;

    @Override
    public void open(Configuration parameters) throws Exception {
        // Load the driver and open the connection
        conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/femo", "root", "123456");
        String sql = "select id,name,age from `t_student`";
        ps = conn.prepareStatement(sql);
    }

//    private boolean flag = true;

    @Override
    public void run(SourceContext<Student> ctx) throws Exception {
        ResultSet resultSet = ps.executeQuery();
        while (resultSet.next()) {
            String id = resultSet.getString("id");
            String name = resultSet.getString("name");
            int age = resultSet.getInt("age");
            // Wrap each row in a Student and emit it
            ctx.collect(new Student(id, name, age));
        }
//        TimeUnit.SECONDS.sleep(5);
    }

    /**
     * Cancel the task.
     */
    @Override
    public void cancel() {
//        flag = false;
    }

    /**
     * Finish the task and close the connection.
     * @throws Exception
     */
    @Override
    public void close() throws Exception {
        // Close the statement before the connection
        if (ps != null) {
            ps.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}
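For completeness, this JDBC source and the StudentDBUtils sink shown earlier can be wired together in a small standalone job. A minimal sketch (the job class name is an assumption, and StudentDBUtils is assumed to be on the classpath of the same module):

import com.founder.bigdata.compute.demo.bean.Student;
import com.founder.bigdata.compute.demo.utils.MySQLSource;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class MySQLSourceJob {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Read students over JDBC, then upsert them through the MyBatis-backed sink.
        DataStream<Student> students = env.addSource(new MySQLSource());
        students.addSink(new StudentDBUtils());

        env.execute("mysql-source-to-mybatis-sink");
    }
}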

MyBatis semi-integration:

import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.founder.bigdata.compute.demo.bean.Student;
import com.founder.bigdata.compute.demo.mapper.StudentMapper;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.ibatis.session.SqlSession;
import org.springframework.beans.factory.annotation.Autowired;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.List;

/**
 * <p>
 * description:
 * </p>
 *
 * @author Bruce 2021/12/02 15:17
 * @program demo
 */
public class MySQLSourceCopy extends RichParallelSourceFunction<Student> {

    private Connection conn = null;
    private PreparedStatement ps = null;
    private SqlSession sqlSession = null;
//    @Autowired
    private StudentMapper mapper;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        this.sqlSession = MybatisSessionFactory.getSqlSessionFactory().openSession();
        this.mapper = sqlSession.getMapper(StudentMapper.class);
    }

//    private boolean flag = true;

    @Override
    public void run(SourceContext<Student> ctx) throws Exception {
        System.out.println(ctx);
        Student student = this.mapper.selectStudents("1");
        ctx.collect(student);
        this.sqlSession.commit();
    }




    /**
     * Cancel the task.
     */
    @Override
    public void cancel() {
//        flag = false;
    }

    /**
     * Finish the task and close the session.
     * @throws Exception
     */
    @Override
    public void close() throws Exception {
        if (this.sqlSession != null)
            this.sqlSession.close();
    }

  

}
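The "semi" part of this integration is that the mapper above comes from a hand-built SqlSessionFactory rather than from the Spring container, which is also why MyBatis-Plus is not usable inside the Flink functions yet. One possible direction (not used in the code above, just a sketch) is to expose the Spring ApplicationContext through a static holder and look the Spring-managed mapper up in open(); this only works while the Flink job runs in the same JVM as the Spring Boot application, as it does in this demo:

import org.springframework.beans.BeansException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component;

// Registered by Spring at startup; Flink functions can fetch beans in open() instead of relying
// on @Autowired fields, which are lost when Flink serializes and re-instantiates the function.
@Component
public class SpringContextHolder implements ApplicationContextAware {

    private static ApplicationContext context;

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        context = applicationContext;
    }

    public static <T> T getBean(Class<T> type) {
        return context.getBean(type);
    }
}

A function's open() could then do this.mapper = SpringContextHolder.getBean(StudentMapper.class); instead of opening its own SqlSession.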

controller:

import com.founder.bigdata.compute.demo.bean.Student;
import com.founder.bigdata.compute.demo.service.impl.TestFlinkImpl;
import com.founder.bigdata.compute.demo.utils.MySQLSource;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

/**
 * <p>
 * description: Spring Boot and Flink integration
 * </p>
 *
 * @author Bruce 2021/12/22 9:32
 * @program demo
 */
@RestController
@RequestMapping("/flink")
public class FlinkDemoController {

    @Autowired
    TestFlinkImpl testFlink;

    @RequestMapping("/test")
    public String get() throws Exception {

/*
        // 1. Prepare the environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // 2. Prepare the data
        DataStream<Student> studentDataStreamSource = env.addSource(new MySQLSource());
        testFlink.getStudentFiledDistinct(2,studentDataStreamSource);
*/
//        testFlink.test();
//        StreamExecutionEnvironment.setDefaultLocalParallelism(1);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();


        testFlink.TestNCConsumer(env);
        env.execute();
        return "  这是一个Flink代码测试程序...  ";
    }


    @RequestMapping(value = "/getStudentList/{id}",method = RequestMethod.GET)
    public List<Student> getStudentList(@PathVariable("id")String id){
        List<Student> students = testFlink.getStudentList(id);
        if (CollectionUtils.isEmpty(students)) throw new RuntimeException("no students found for id: " + id);
        return students;
    }


    @RequestMapping(value = "/addST",method = RequestMethod.GET)
    public String addST() throws Exception{
        int i = testFlink.addST();

        return i == 1 ? "add succeeded" : "add failed";
    }




}
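Not shown in this post is the Spring Boot entry point. For the @Autowired StudentMapper in the service layer to resolve, the mapper package has to be scanned. A minimal sketch, assuming the mybatis(-plus) spring-boot starter is on the classpath and using the package names from the code above (the application class name is an assumption):

import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Scans the mapper interfaces so Spring can inject StudentMapper into the service and controller.
@SpringBootApplication
@MapperScan("com.founder.bigdata.compute.demo.mapper")
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}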
