0. Environment Setup
- Follow the previous post, "Hadoop Installation Notes", to set up the Hadoop and YARN environment.
1. Prepare the Test Program
- pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.example</groupId>
    <artifactId>flink-tutorial</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <!-- match the Flink version installed on the cluster below -->
        <flink.version>1.17.2</flink.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients</artifactId>
            <version>${flink.version}</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- build a fat jar at package time -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.2.4</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <artifactSet>
                                <excludes>
                                    <exclude>org.slf4j:*</exclude>
                                    <exclude>log4j:*</exclude>
                                </excludes>
                            </artifactSet>
                            <filters>
                                <filter>
                                    <!-- strip signature files so the shaded jar is not rejected -->
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers combine.children="append">
                                <!-- merge META-INF/services files from all dependencies -->
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
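The maven-shade-plugin configuration above builds a fat jar during the package phase: jar signature files are stripped and META-INF/services entries from different dependencies are merged so service-loader registrations are not overwritten. Once the job class below is in place, the jar is built with a plain Maven package run (a usage note, not an original step):
# in the project root, next to pom.xml
mvn clean package
# the shaded jar is written to target/flink-tutorial-1.0-SNAPSHOT.jar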
- WcStreamDemo.java
package org.example.flinktutorial;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class WcStreamDemo {
    public static void main(String[] args) throws Exception {
        // execution environment
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // socket source: read lines from node1:7777
        final DataStreamSource<String> dataStreamSource = env.socketTextStream("node1", 7777);

        // split each line into (word, 1) tuples
        final SingleOutputStreamOperator<Tuple2<String, Integer>> flatMap =
                dataStreamSource.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String inputStr, Collector<Tuple2<String, Integer>> out) throws Exception {
                        final String[] words = inputStr.split(" ");
                        for (String word : words) {
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                });

        // group by the word (field f0)
        final KeyedStream<Tuple2<String, Integer>, String> ks =
                flatMap.keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> value) throws Exception {
                        return value.f0;
                    }
                });

        // sum the counts (tuple field index 1)
        final SingleOutputStreamOperator<Tuple2<String, Integer>> sumDS = ks.sum(1);

        // print results to the TaskManagers' stdout
        sumDS.print();

        // trigger execution
        env.execute();
    }
}
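The anonymous inner classes can also be written as lambdas. Because Java erases the generic Tuple2 type from a lambda, Flink then needs the output type declared explicitly via returns(...). The following is an illustrative sketch only (a hypothetical variant, not part of the tutorial program):
package org.example.flinktutorial;

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class WcStreamLambdaDemo {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.socketTextStream("node1", 7777)
                // split each line into (word, 1) tuples
                .flatMap((String line, Collector<Tuple2<String, Integer>> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1));
                    }
                })
                // a lambda loses its generic type information to erasure, so declare it explicitly
                .returns(Types.TUPLE(Types.STRING, Types.INT))
                .keyBy(value -> value.f0)
                .sum(1)
                .print();

        env.execute("WcStreamLambdaDemo");
    }
}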
2. Installation
- Run the following on node1:
cd /export/server/
curl -OL https://dlcdn.apache.org/flink/flink-1.17.2/flink-1.17.2-bin-scala_2.12.tgz
tar xf flink-1.17.2-bin-scala_2.12.tgz
ln -s flink-1.17.2 flink
cat >> /etc/profile << EOF
export HADOOP_CONF_DIR=\${HADOOP_HOME}/etc/hadoop
export HADOOP_CLASSPATH=\`hadoop classpath\`
EOF
source /etc/profile
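Optionally, verify that the Hadoop variables resolve and that YARN is reachable before submitting anything (a quick sanity check, not part of the original steps):
echo $HADOOP_CONF_DIR
hadoop classpath | cut -c1-120
yarn node -list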
- Test
Copy the shaded jar built in section 1 (target/flink-tutorial-1.0-SNAPSHOT.jar) to /export/server/flink on node1. Then run nc -lk 7777 on node1 to start a socket server listening on port 7777, upload the Flink libraries and the job jar to HDFS, and submit the job in YARN application mode:
cd /export/server/flink
hadoop fs -mkdir /flink-dist
hadoop fs -put lib /flink-dist
hadoop fs -put plugins /flink-dist
hadoop fs -mkdir /flink-jars
hadoop fs -put flink-tutorial-1.0-SNAPSHOT.jar /flink-jars
bin/flink run-application -t yarn-application -Dyarn.provided.lib.dirs="hdfs://node1:8020/flink-dist" -c org.example.flinktutorial.WcStreamDemo hdfs://node1:8020/flink-jars/flink-tutorial-1.0-SNAPSHOT.jar
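On a successful submission the client prints the YARN application id and the JobManager web interface URL. The running job can also be confirmed from YARN (an optional check):
yarn application -list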
Type a few lines of input into the netcat session on node1.
- Expected result
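Because print() writes to the TaskManagers' stdout, the counts do not appear in the submitting shell; view them in the Flink web UI (reachable via the ApplicationMaster link in the YARN ResourceManager UI) or, after the job has finished, with yarn logs -applicationId <application id>. As a rough illustration, typing hello world followed by hello flink into netcat should produce output along these lines (the N> prefix is the subtask index and depends on the parallelism):
1> (hello,1)
2> (world,1)
1> (hello,2)
3> (flink,1)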