Flink: various output sinks

This post walks through writing a Flink DataStream to three common sinks: Kafka, MySQL (via a custom RichSinkFunction), and Redis (via the Bahir connector), plus a small collection-based source for testing.

pom.xml

 <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-java</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-streaming-java_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-connector-kafka-0.11_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-statebackend-rocksdb_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-table-planner_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.38</version>
    </dependency>
    <dependency>
      <groupId>org.apache.bahir</groupId>
      <artifactId>flink-connector-redis_2.11</artifactId>
      <version>1.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-streaming-scala_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-scala_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
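
Every example below uses a SensorReading POJO that this post does not otherwise show. A minimal sketch, with field names (id, timestamp, temperature) inferred from the constructor calls and getters used in the examples:

package kgc.kb11.beans;

/**
 * Minimal SensorReading POJO (a sketch; field names are assumptions
 * inferred from how the class is used in the examples below).
 */
public class SensorReading {
    private String id;
    private Long timestamp;
    private Double temperature;

    public SensorReading() {
    }

    public SensorReading(String id, Long timestamp, Double temperature) {
        this.id = id;
        this.timestamp = timestamp;
        this.temperature = temperature;
    }

    public String getId() {
        return id;
    }

    public Long getTimestamp() {
        return timestamp;
    }

    public Double getTemperature() {
        return temperature;
    }

    @Override
    public String toString() {
        return "SensorReading{id='" + id + "', timestamp=" + timestamp
                + ", temperature=" + temperature + "}";
    }
}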

1. Kafka

package kgc.kb11.beans;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Properties;

/**
 * @author zhouhu
 * @Date
 * @Description
 */

public class Sink1_kafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer properties
        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.119.125:9092");
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sensor_group1");
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

        // source: consume raw CSV strings from the "sensor" topic
        DataStreamSource<String> dataStream = env.addSource(new FlinkKafkaConsumer011<String>(
                "sensor",
                new SimpleStringSchema(),
                prop
        ));

        // parse each CSV line into a SensorReading, then back to its String form for the producer
        SingleOutputStreamOperator<String> result = dataStream.map(line -> {
            String[] split = line.split(",");
            String s = new SensorReading(
                    split[0],
                    Long.parseLong(split[1]),
                    Double.parseDouble(split[2])
            ).toString();
            return s;
        });
        // sink: publish the formatted records to the "sensorout" topic
        result.addSink(new FlinkKafkaProducer011<String>(
                "192.168.119.125:9092",
                "sensorout",
                new SimpleStringSchema()
        ));
        result.print();
        env.execute("kafkademo");

    }
}
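
Each Kafka record is expected to be a CSV line with three fields (id, timestamp, temperature); the sample lines below match the records used in the collection example at the end of this post:

sensor_1,1624853427,37.5
sensor_3,1624853428,36.5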

2. MySQL

package kgc.kb11.beans;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.Properties;

/**
 * @author zhouhu
 * @Date
 * @Description
 */

public class Sink2_Mysql {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.119.125:9092");
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sensor_group1");
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"latest");

        DataStreamSource<String> dataStream = env.addSource(new FlinkKafkaConsumer011<String>(
                "sensor",
                new SimpleStringSchema(),
                prop
        ));
        SingleOutputStreamOperator<SensorReading> result = dataStream.map(line -> {
            String[] split = line.split(",");
            SensorReading sensorReading = new SensorReading(
                    split[0],
                    Long.parseLong(split[1]),
                    Double.parseDouble(split[2])
            );
            return sensorReading;
        });
        // sink: custom JDBC sink (RichSinkFunction) defined below
        result.addSink(new MyJdbcSink());

        env.execute("sinkMysqldemo");
    }

    private static class MyJdbcSink extends RichSinkFunction<SensorReading> {
        Connection connection = null;
        PreparedStatement insertstmt = null;
        PreparedStatement updatestmt = null;


        @Override
        public void open(Configuration parameters) throws Exception {
            // open one JDBC connection per parallel sink instance
            connection = DriverManager.getConnection(
                    "jdbc:mysql://192.168.119.125:3306/flinkdemo",
                    "root",
                    "ok"
            );
            // prepare the precompiled insert statement
            insertstmt = this.connection.prepareStatement("insert into sensor_temp values(?,?)");
            // prepare the precompiled update statement
            updatestmt = this.connection.prepareStatement("update sensor_temp set temp=? where id=?");
        }

        @Override
        public void invoke(SensorReading value, Context context) throws Exception {
            // run the update first; if no row matched this sensor id, insert a new one
            // (the original checked getUpdateCount() before executing the update, so the
            // check could never reflect the current record)
            updatestmt.setDouble(1, value.getTemperature());
            updatestmt.setString(2, value.getId());
            updatestmt.execute();
            if (updatestmt.getUpdateCount() == 0) {
                insertstmt.setString(1, value.getId());
                insertstmt.setDouble(2, value.getTemperature());
                insertstmt.execute();
            }
        }
        @Override
        public void close() throws Exception {
            insertstmt.close();
            updatestmt.close();
            connection.close();
        }
    }
}
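
The update-then-insert pair can be collapsed into one statement if sensor_temp has a primary or unique key on id (an assumption; the post does not show the table DDL). A sketch of the alternative, where upsertstmt is a hypothetical field replacing the two prepared statements:

// in open(), assuming a unique key on sensor_temp.id:
upsertstmt = connection.prepareStatement(
        "insert into sensor_temp (id, temp) values (?, ?) "
                + "on duplicate key update temp = values(temp)");

// in invoke():
upsertstmt.setString(1, value.getId());
upsertstmt.setDouble(2, value.getTemperature());
upsertstmt.execute();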

3. Redis

package kgc.kb11.beans;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;

/**
 * @author zhouhu
 * @Date
 * @Description
 */

public class Sink3_Redis {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        String filePath="D:\\ideas project\\flinkstu\\resources\\sensor.txt";
        DataStreamSource<String> inputStream = env.readTextFile(filePath);
        SingleOutputStreamOperator<SensorReading> dataStream = inputStream.map(line -> {
            String[] splits = line.split(",");
            return new SensorReading(
                    splits[0],
                    Long.parseLong(splits[1]),
                    Double.parseDouble(splits[2])
            );
        });
        FlinkJedisPoolConfig conf = new FlinkJedisPoolConfig.Builder()
                .setHost("192.168.119.125")
                .setPort(6379)
                .setDatabase(1)
                .build();

        dataStream.addSink(new RedisSink<>(conf, new RedisMapper<SensorReading>() {
            @Override
            public RedisCommandDescription getCommandDescription() {
                // write each record as HSET sensor <id> <temperature>
                return new RedisCommandDescription(RedisCommand.HSET, "sensor");
            }

            @Override
            public String getKeyFromData(SensorReading sensorReading) {
                return sensorReading.getId();
            }

            @Override
            public String getValueFromData(SensorReading sensorReading) {
                return sensorReading.getTemperature().toString();
            }
        }));

        // the original was missing this call, so the job never actually ran
        env.execute("redisSink");
    }
}
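
To verify what the sink wrote, you can read the hash back with the Jedis client that the Bahir connector already pulls in. A minimal sketch, assuming the same host, port, and database as the sink configuration above:

import redis.clients.jedis.Jedis;

import java.util.Map;

public class RedisCheck {
    public static void main(String[] args) {
        // connect to the same Redis instance and database the sink writes to
        try (Jedis jedis = new Jedis("192.168.119.125", 6379)) {
            jedis.select(1);
            // the sink issues HSET sensor <id> <temperature>, one field per sensor id
            Map<String, String> fields = jedis.hgetAll("sensor");
            fields.forEach((id, temp) -> System.out.println(id + " -> " + temp));
        }
    }
}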

4. Collection (source)

Not a sink, but handy for testing the sinks above: a source built from an in-memory collection.

package kgc.kb11;

import kgc.kb11.beans.SensorReading;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;

/**
 * @author zhouhu
 * @Date
 * @Description
 */

public class Source1_Collection {
    public static void main(String[] args) throws Exception {
        // 1. create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        ArrayList<SensorReading> list = new ArrayList<>();
        list.add(new SensorReading("sensor_1",1624853427L,37.5));
        list.add(new SensorReading("sensor_3",1624853428L,36.5));
        list.add(new SensorReading("sensor_5",1624853429L,35.5));
        list.add(new SensorReading("sensor_7",1624853431L,37.3));
        list.add(new SensorReading("sensor_9",1624853432L,37.9));

        // 2. source: build a DataStream from the collection
        DataStreamSource<SensorReading> dataStreamSource = env.fromCollection(list);
        dataStreamSource.print("sensor");
        env.execute("collection");

    }
}
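
For quick ad-hoc tests, env.fromElements builds the same kind of stream without an intermediate list; a minimal sketch:

// equivalent one-liner source for ad-hoc testing
DataStreamSource<SensorReading> stream = env.fromElements(
        new SensorReading("sensor_1", 1624853427L, 37.5),
        new SensorReading("sensor_3", 1624853428L, 36.5));
stream.print("sensor");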
