pom.xml依赖
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.11_2.11</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-statebackend-rocksdb_2.11</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_2.11</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.38</version>
</dependency>
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.11</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.11</artifactId>
<version>1.10.1</version>
</dependency>
1.内存
package kgc.kb11;
import kgc.kb11.beans.SensorReading;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import java.util.ArrayList;
public class Source1_Colelction {
    // NOTE(review): class name has a typo ("Colelction" -> "Collection");
    // kept as-is because renaming a public class would break external references.

    /**
     * Demo: builds a bounded stream from an in-memory collection of
     * {@link SensorReading} records and prints each element.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the printed output keeps insertion order.
        env.setParallelism(1);
        // Fix: use the element type instead of ArrayList<Object>, so the
        // resulting stream is typed as SensorReading rather than Object.
        ArrayList<SensorReading> list = new ArrayList<>();
        list.add(new SensorReading("sensor_1", 1624853427L, 37.5));
        list.add(new SensorReading("sensor_3", 1624853428L, 36.5));
        list.add(new SensorReading("sensor_5", 1624853429L, 35.5));
        list.add(new SensorReading("sensor_7", 1624853431L, 37.3));
        list.add(new SensorReading("sensor_9", 1624853432L, 37.9));
        DataStreamSource<SensorReading> dataStreamSource = env.fromCollection(list);
        dataStreamSource.print("sensor");
        env.execute("collection");
    }
}
2.文件
package kgc.kb11.beans;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class Source2_File {
    /**
     * Demo: reads a local text file as a bounded stream (one element per
     * line) and prints every line with the "filesource" prefix.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);
        // Each line of the file becomes one String element of the stream.
        DataStreamSource<String> lines =
                environment.readTextFile("D:\\ideas project\\flinkstu\\resources\\words.txt");
        lines.print("filesource");
        environment.execute("fileSource");
    }
}
3.kafka
package kgc.kb11.beans;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.util.Properties;
public class Source3_kafka {
    /**
     * Demo: consumes the "sensor" topic from a Kafka broker and prints each
     * record as a plain String.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.119.125:9092");
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "sensor_group1");
        // Fix: reference the class instead of a hand-typed FQCN string (this
        // also puts the previously unused StringDeserializer import to work).
        // NOTE(review): the Flink consumer deserializes via SimpleStringSchema,
        // so these two properties are presumably ignored by Flink — confirm.
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        DataStreamSource<String> dataStream = env.addSource(new FlinkKafkaConsumer011<String>(
                "sensor",
                new SimpleStringSchema(),
                prop
        ));
        dataStream.print();
        env.execute("kafkademo");
    }
}
nc(Socket源:先在服务器执行 nc -lk 7777,再运行下面的 WordCount)
package kgc.kb11;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;
public class WordCount {
    /**
     * Streaming word count over a socket source (start e.g. {@code nc -lk 7777}
     * on the host first): splits each incoming line on whitespace, keys by
     * word, and keeps a running per-word sum.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStreamSource<String> inputDataStream =
                env.socketTextStream("192.168.119.125", 7777);
        // keyBy(0) / sum(1) address the Tuple2 fields (word, count) by position.
        SingleOutputStreamOperator<Tuple2<String, Integer>> sum =
                inputDataStream.flatMap(new MyFlatMap()).keyBy(0).sum(1);
        sum.print();
        // Consistency fix: propagate failures like the other demos instead of
        // catching Exception and printStackTrace-ing inside main.
        env.execute();
    }

    /** Splits a line into whitespace-separated words and emits (word, 1) per word. */
    public static class MyFlatMap implements FlatMapFunction<String, Tuple2<String, Integer>> {
        @Override
        public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
            String[] words = s.split("\\s+");
            for (String word : words) {
                collector.collect(new Tuple2<>(word, 1));
            }
        }
    }
}
4.自定义
package kgc.kb11.beans;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import java.util.Random;
public class source_MySource {
    // NOTE(review): class name breaks UpperCamelCase convention; kept as-is
    // because renaming a public class would break external references.

    /**
     * Demo: a custom SourceFunction that emits one pseudo-random
     * SensorReading per second until the job is cancelled.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<SensorReading> dataStreamSource = env.addSource(new MySensorSource());
        dataStreamSource.print();
        env.execute("yes");
    }

    private static class MySensorSource implements SourceFunction<SensorReading> {
        // volatile: cancel() is invoked from a different thread than run().
        private volatile boolean flag = true;

        @Override
        public void run(SourceContext<SensorReading> ctx) throws Exception {
            // Hoisted out of the loop: no need to allocate a Random per element.
            Random random = new Random();
            while (flag) {
                ctx.collect(
                        new SensorReading(
                                "",
                                System.currentTimeMillis(),
                                random.nextInt(9) + 30.0
                        )
                );
                Thread.sleep(1000);
            }
        }

        @Override
        public void cancel() {
            // BUG FIX: this was empty, so flag was never cleared and the
            // source loop could never be stopped cooperatively.
            flag = false;
        }
    }
}