Reading Data with Flink

Reading data from Kafka

Add the dependency

<dependency>
     <groupId>org.apache.flink</groupId>
     <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
     <version>${flink.version}</version>
</dependency>
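
The ${scala.binary.version} and ${flink.version} placeholders are expected to be defined as Maven properties in the pom.xml; the versions below are only an assumed example and should match the target cluster:

<properties>
     <!-- assumed example versions; use the versions of your Flink installation -->
     <scala.binary.version>2.12</scala.binary.version>
     <flink.version>1.13.6</flink.version>
</properties>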

Create the class

package hao.day03;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Properties;

public class Flink_Source_FromKafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(3); // the parallelism should ideally match the partition count of the Kafka topic
        Properties properties = new Properties();
        properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "dragon01:9092");
        properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "hao");
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        //properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        // consume the "hao" topic, deserializing each record value as a plain string
        FlinkKafkaConsumer<String> kafkaConsumer = new FlinkKafkaConsumer<>("hao", new SimpleStringSchema(), properties);
        DataStreamSource<String> kafkaDs = env.addSource(kafkaConsumer);
        kafkaDs.print();
        env.execute();
    }
}
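
Note that auto.offset.reset only takes effect when the consumer group has no committed offsets. FlinkKafkaConsumer also exposes explicit start-position methods; a minimal sketch of the options, to be placed before env.addSource(kafkaConsumer):

// optional: choose the start position explicitly
kafkaConsumer.setStartFromGroupOffsets(); // default: resume from the committed group offsets
//kafkaConsumer.setStartFromEarliest();   // always read the topic from the beginning
//kafkaConsumer.setStartFromLatest();     // only read records produced after the job starts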

Using MySQL as the Flink data source

Read from MySQL by implementing a custom SourceFunction (the MySQL password used below is Mysql@123).
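
The driver class com.mysql.cj.jdbc.Driver used below comes from MySQL Connector/J, which must also be on the classpath; the version here is only an assumed example:

<dependency>
     <groupId>mysql</groupId>
     <artifactId>mysql-connector-java</artifactId>
     <version>8.0.28</version>
</dependency>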

package hao.day03;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.sql.*;

public class Flink_Source_FromMysql {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStreamSource<String> mysqlDs = env.addSource(new MySqlSource());
        mysqlDs.print();
        env.execute();

    }

    private static class MySqlSource implements SourceFunction<String> {
        // volatile so that cancel(), called from another thread, is visible to run()
        private volatile boolean running = true;
        @Override
        public void run(SourceContext<String> sourceContext) throws Exception {
            // register the JDBC driver
            Class.forName("com.mysql.cj.jdbc.Driver");
            // connection URL
            String url = "jdbc:mysql://dragon01:3306/hao?useSSL=false";
            // open the connection
            Connection conn = DriverManager.getConnection(url, "root", "Mysql@123");
            // prepare the SQL statement
            String select = "select name from sss";
            PreparedStatement psmt = conn.prepareStatement(select);
            while (running) {
                // run the query once, emit every row, then release the resources and stop
                ResultSet rs = psmt.executeQuery();
                while (rs.next()) {
                    String name = rs.getString(1);
                    sourceContext.collect(name);
                }
                rs.close();
                psmt.close();
                conn.close();
                running = false;
            }
        }

        @Override
        public void cancel() {
            running = false;

        }
    }
}
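
The source above reads the table once and then stops. If the job should keep re-reading the table, the same SourceFunction pattern can poll on an interval; below is a minimal sketch that could replace MySqlSource inside the class above, where the name MySqlPollingSource and the 5-second interval are illustrative assumptions:

    // hypothetical polling variant: re-runs the query until the job is cancelled
    private static class MySqlPollingSource implements SourceFunction<String> {
        private volatile boolean running = true;

        @Override
        public void run(SourceContext<String> ctx) throws Exception {
            Class.forName("com.mysql.cj.jdbc.Driver");
            String url = "jdbc:mysql://dragon01:3306/hao?useSSL=false";
            try (Connection conn = DriverManager.getConnection(url, "root", "Mysql@123");
                 PreparedStatement psmt = conn.prepareStatement("select name from sss")) {
                while (running) {
                    try (ResultSet rs = psmt.executeQuery()) {
                        while (rs.next()) {
                            ctx.collect(rs.getString(1));
                        }
                    }
                    Thread.sleep(5000L); // assumed poll interval: 5 seconds
                }
            }
        }

        @Override
        public void cancel() {
            running = false;
        }
    }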

Summary

Flink can ingest streaming data from Kafka by adding the flink-connector-kafka dependency and passing a FlinkKafkaConsumer to env.addSource(), and it can read from MySQL by implementing a custom SourceFunction that queries the database over JDBC and emits each row through the SourceContext.
