Flink consumes from Kafka and writes the records to MySQL

The main method consumes from Kafka and sinks the data to MySQL through a custom implementation class

import akka.japi.tuple.Tuple4;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.io.Serializable;
import java.util.Properties;

/**
 * @author Miller
 * @date 2021年09月18日 2:16 下午
 * @description
 */

public class KafkaToMysql implements Serializable {

	//定义内部类,和需要写入的表结构一致
    static class user {

        final String name;
        final String gender;
        final String phoneNumber;
        final Integer age;

        public user(String name, Integer age, String gender, String phoneNumber) {
            this.name = name;
            this.age = age;
            this.gender = gender;
            this.phoneNumber = phoneNumber;
        }


    }

    public static void main(String[] args) throws Exception {

		//kafka相关配置
        String topic = "mykafka";
        Properties kafkaConf = new Properties();
        kafkaConf.put(ConsumerConfig.GROUP_ID_CONFIG,"kafkaTest1");
        kafkaConf.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"10.0.27.21:9092");
        kafkaConf.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,true);
        kafkaConf.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 10);
        kafkaConf.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 3000);
        kafkaConf.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 120000);
        kafkaConf.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 180000);
        kafkaConf.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        kafkaConf.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        //获取流执行环境
        StreamExecutionEnvironment envs = StreamExecutionEnvironment.getExecutionEnvironment();

		//添加kafka source
        DataStreamSource<String> mykafka = envs.addSource(new FlinkKafkaConsumer<String>(topic, new SimpleStringSchema(), kafkaConf));
		//打印数据
        mykafka.print();
         *//*new user("李华123", 12, "男", "1881881888")*//*
		//kafka数据转换成	Tuple4<String, Integer, String, String>类型	
        mykafka.map((MapFunction<String, Tuple4<String, Integer, String, String>>) value ;

/*今天在使用Flink 时泛型采用的时Tuple,在后面进行算子操作时,采用了lamada表达式发现,代码运行时报以下错误
The generic type parameters of ‘Tuple4’ are missing. In many cases lambda methods don’t provide enough information for automatic type extraction when Java generics are involved. An easy workaround is to use an (anonymous) class instead that implements the ‘org.apache.flink.api.common.functions.MapFunction’ interface. Otherwise the type has to be specified explicitly using type information.
其翻译过来缺少“Tuple4”的泛型类型参数。在许多情况下,当涉及Java泛型时,lambda方法不能为自动类型提取提供足够的信息。一个简单的解决方法是使用(匿名)类来实现网址:apache.flink.api.common.functions函数.MapFunction的接口。否则,必须使用类型信息显式指定类型。*/

		//内部类转换数据格式
		//如果直接通过流返回Tuple4则会报错,当涉及Java泛型时,lambda方法不能为自动类型提取提供足够的信息
		
        DataStream<Tuple4<String, Integer, String, String>> stream = mykafka.map(new MapFunction<String, Tuple4<String, Integer, String, String>>() {

            private static final long serialVersionUID = 1L;

            @Override
            public Tuple4<String, Integer, String, String> map(String value) throws Exception {
                String[] strings = value.split(",");
                return new Tuple4<String, Integer, String, String>(strings[0],Integer.parseInt(strings[1]),strings[2],strings[3]);
            }
        });    
        stream.addSink(new MysqlImpl());
        envs.execute();
    }
}

MySQL sink implementation class

import akka.japi.tuple.Tuple4;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

/**
 * @author Miller
 * @date 2022年03月25日 6:32 下午
 * @description
 */

public class MysqlImpl extends RichSinkFunction<Tuple4<String, Integer, String, String>> {
        private Connection connection;
        private PreparedStatement preparedStatement;
        String username = "root";
        String password = "12345678";
        String drivername = "com.mysql.jdbc.Driver";   // adjust to your own environment
        String dburl = "jdbc:mysql://localhost:3306/test_local";

    /**
     * Legacy single-argument overload. Delegates to the Context overload so
     * there is exactly one copy of the insert logic.
     *
     * The original version bound the JDBC parameters at the wrong indices
     * (2, 1, 3, 3): the name went into the age slot, and parameter 4 was
     * never set, so executeUpdate() would throw at runtime.
     */
    @Override
    public void invoke(Tuple4<String, Integer, String, String> value) throws Exception {
        invoke(value, null);
    }

    /**
     * Inserts one record into the {@code user} table
     * (columns: name, age, gender, phone_number).
     *
     * NOTE(review): opening a new connection per record is expensive; for a
     * RichSinkFunction the connection is usually created in open() and
     * released in close() — consider that refactor separately.
     *
     * @param value   the (name, age, gender, phone_number) tuple to insert
     * @param context sink context supplied by Flink (unused; may be null)
     * @throws Exception if the JDBC driver is missing or the insert fails
     */
    @Override
    public void invoke(Tuple4<String, Integer, String, String> value, Context context) throws Exception {
        Class.forName(drivername);
        String sql = "insert into user (name ,age,gender,phone_number) values(?,?,?,?)";
        // try-with-resources guarantees the statement and connection are
        // closed even if the insert throws (the original leaked on failure).
        try (Connection conn = DriverManager.getConnection(dburl, username, password);
             PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setString(1, value.t1()); // name
            ps.setInt(2, value.t2());    // age
            ps.setString(3, value.t3()); // gender
            ps.setString(4, value.t4()); // phone_number
            ps.executeUpdate();
        }
    }

}


  • 2
    点赞
  • 7
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值