Flink data sources

pom.xml dependencies

    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-java</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-streaming-java_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-connector-kafka-0.11_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-statebackend-rocksdb_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-table-planner_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.38</version>
    </dependency>
    <dependency>
      <groupId>org.apache.bahir</groupId>
      <artifactId>flink-connector-redis_2.11</artifactId>
      <version>1.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-streaming-scala_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
    <dependency>
      <groupId>org.apache.flink</groupId>
      <artifactId>flink-scala_2.11</artifactId>
      <version>1.10.1</version>
    </dependency>
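
All of the examples below construct SensorReading objects. The POJO itself is not shown in the original post; a minimal sketch inferred from the constructor calls (id, timestamp, temperature) would look like this:

package kgc.kb11.beans;

/**
 * Minimal sketch of the SensorReading POJO assumed by the examples
 * (inferred from usage; the original definition is not shown).
 */
public class SensorReading {
    private String id;
    private Long timestamp;
    private Double temperature;

    public SensorReading() {
    }

    public SensorReading(String id, Long timestamp, Double temperature) {
        this.id = id;
        this.timestamp = timestamp;
        this.temperature = temperature;
    }

    public String getId() { return id; }
    public void setId(String id) { this.id = id; }
    public Long getTimestamp() { return timestamp; }
    public void setTimestamp(Long timestamp) { this.timestamp = timestamp; }
    public Double getTemperature() { return temperature; }
    public void setTemperature(Double temperature) { this.temperature = temperature; }

    @Override
    public String toString() {
        return "SensorReading{id='" + id + "', timestamp=" + timestamp +
                ", temperature=" + temperature + '}';
    }
}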

1. In-memory collection

package kgc.kb11;

import kgc.kb11.beans.SensorReading;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.ArrayList;

/**
 * @author zhouhu
 * @Date
 * @Description reads a bounded stream from an in-memory collection
 */

public class Source1_Collection {
    public static void main(String[] args) throws Exception {
        // 1. create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        ArrayList<SensorReading> list = new ArrayList<>();
        list.add(new SensorReading("sensor_1", 1624853427L, 37.5));
        list.add(new SensorReading("sensor_3", 1624853428L, 36.5));
        list.add(new SensorReading("sensor_5", 1624853429L, 35.5));
        list.add(new SensorReading("sensor_7", 1624853431L, 37.3));
        list.add(new SensorReading("sensor_9", 1624853432L, 37.9));

        // 2. source: turn the collection into a DataStream
        DataStreamSource<SensorReading> dataStreamSource = env.fromCollection(list);
        dataStreamSource.print("sensor");
        env.execute("collection");
    }
}
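
For a handful of hard-coded test records, env.fromElements is a shorthand for the same thing without building a list first (a minimal sketch; behavior is equivalent to fromCollection here):

        DataStreamSource<SensorReading> dataStreamSource = env.fromElements(
                new SensorReading("sensor_1", 1624853427L, 37.5),
                new SensorReading("sensor_3", 1624853428L, 36.5)
        );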

2. File

package kgc.kb11.beans;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * @author zhouhu
 * @Date
 * @Description reads a bounded stream from a text file, one record per line
 */

public class Source2_File {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        DataStreamSource<String> inputDataStream =
                env.readTextFile("D:\\ideas project\\flinkstu\\resources\\words.txt");
        inputDataStream.print("filesource");
        env.execute("fileSource");
    }
}
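
readTextFile reads the file once and the job then finishes. To keep watching the same path for changes, env.readFile with FileProcessingMode.PROCESS_CONTINUOUSLY re-scans it on an interval (a sketch reusing the same path; note that Flink re-reads the whole file whenever it is modified):

import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.streaming.api.functions.source.FileProcessingMode;

        // inside main(), after creating env:
        String path = "D:\\ideas project\\flinkstu\\resources\\words.txt";
        DataStreamSource<String> monitored = env.readFile(
                new TextInputFormat(new Path(path)),      // same line-by-line format as readTextFile
                path,
                FileProcessingMode.PROCESS_CONTINUOUSLY,  // keep scanning instead of read-once
                1000L                                     // scan interval in milliseconds
        );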

3. Kafka

package kgc.kb11.beans;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Properties;

/**
 * @author zhouhu
 * @Date
 * @Description consumes the "sensor" topic from Kafka as an unbounded stream
 */

public class Source3_Kafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.119.125:9092");
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sensor_group1");
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        DataStreamSource<String> dataStream = env.addSource(new FlinkKafkaConsumer011<String>(
                "sensor",
                new SimpleStringSchema(),
                prop
        ));

        dataStream.print();
        env.execute("kafkademo");
    }
}
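
By default the consumer starts from the committed offsets of the consumer group. FlinkKafkaConsumer011 also lets you pick the start position explicitly; a sketch of the common options (call one of them on the consumer before passing it to addSource):

        FlinkKafkaConsumer011<String> consumer = new FlinkKafkaConsumer011<>(
                "sensor", new SimpleStringSchema(), prop);
        consumer.setStartFromEarliest();         // read the topic from the beginning
        // consumer.setStartFromLatest();        // or: only records produced from now on
        // consumer.setStartFromGroupOffsets();  // the default behavior
        DataStreamSource<String> dataStream = env.addSource(consumer);

For testing, records can be pushed to the topic with the standard kafka-console-producer script pointed at the same broker (192.168.119.125:9092) and topic (sensor).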

Socket (nc)

package kgc.kb11;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * @author zhouhu
 * @Date
 * @Description word count over a socket text stream
 */

public class WordCount {
    public static void main(String[] args) {
        // create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStreamSource<String> inputDataStream =
                env.socketTextStream("192.168.119.125", 7777);

        SingleOutputStreamOperator<Tuple2<String, Integer>> sum =
                inputDataStream.flatMap(new MyFlatMap()).keyBy(0).sum(1);
        sum.print();

        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // splits each line on whitespace and emits a (word, 1) pair per word
    public static class MyFlatMap implements FlatMapFunction<String, Tuple2<String, Integer>> {
        @Override
        public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
            String[] words = s.split("\\s+");
            for (String word : words) {
                collector.collect(new Tuple2<>(word, 1));
            }
        }
    }
}
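
To drive this job, open the port on the source host first (here 192.168.119.125), e.g. with nc -lk 7777, then type lines into the nc session; every word typed shows up in the job output as a (word, count) pair with a running count.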

4. Custom source

package kgc.kb11.beans;

import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.util.Random;

/**
 * @author zhouhu
 * @Date
 * @Description custom SourceFunction that emits one random reading per second
 */

public class Source4_MySource {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> dataStreamSource = env.addSource(new MySensorSource());
        dataStreamSource.print();
        env.execute("yes");
    }

    private static class MySensorSource implements SourceFunction<SensorReading> {
        // volatile: cancel() is called from a different thread than run()
        private volatile boolean flag = true;
        private final Random random = new Random();

        @Override
        public void run(SourceContext<SensorReading> ctx) throws Exception {
            while (flag) {
                ctx.collect(
                        new SensorReading(
                                "",                           // sensor id (left empty here)
                                System.currentTimeMillis(),
                                random.nextInt(9) + 30.0      // temperature in [30.0, 39.0)
                        )
                );
                Thread.sleep(1000);
            }
        }

        @Override
        public void cancel() {
            // stop the emit loop when the job is cancelled
            flag = false;
        }
    }
}
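
A plain SourceFunction always runs with parallelism 1. If the generator needs to scale out, implementing ParallelSourceFunction instead (or extending RichParallelSourceFunction when the runtime context is needed) lets Flink run one copy per subtask; a minimal sketch reusing the same emit loop:

import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction;

    // the marker interface alone is what makes this source parallel
    private static class MyParallelSensorSource implements ParallelSourceFunction<SensorReading> {
        private volatile boolean flag = true;

        @Override
        public void run(SourceContext<SensorReading> ctx) throws Exception {
            Random random = new Random();
            while (flag) {
                ctx.collect(new SensorReading("", System.currentTimeMillis(), random.nextInt(9) + 30.0));
                Thread.sleep(1000);
            }
        }

        @Override
        public void cancel() {
            flag = false;
        }
    }

    // usage: env.addSource(new MyParallelSensorSource()).setParallelism(4);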