flink 测试demo
以flink1.13.1为例
1. maven
<project>
<groupId>com.example</groupId>
<artifactId>flink-example</artifactId>
<version>1.0-SNAPSHOT</version>
<dependencies>
<!-- Flink依赖 -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.13.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.13.1</version>
</dependency>
<!-- Flink 1.11+ 需要显式引入 flink-clients，否则 env.execute() 会报
     "No ExecutorFactory found to execute the application" -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>1.13.1</version>
</dependency>
<!-- 下面示例代码用到的第三方库 -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.76</version>
</dependency>
<dependency>
<groupId>cn.hutool</groupId>
<artifactId>hutool-core</artifactId>
<version>5.7.3</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.20</version>
<scope>provided</scope>
</dependency>
<!-- JUnit依赖 -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.13.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Flink插件 -->
<!-- 官方文档推荐用 maven-shade-plugin 打 fat-jar（不存在 flink-maven-plugin 这个插件） -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.4</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
2. prop.json 参数
{
"topic":"pupu_main",
"kafka_brokers":"127.0.0.1:9092",
"group_id":"pHistTask",
"s3_path":"/Users/Desktop/code/test",
"checkpoint.interval":"60000",
"checkpoint.timeout": "1200000"
}
3. code
3.1 FlinkTest1.class
package demo.test;
import com.alibaba.fastjson.JSON;
import lombok.Builder;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
@Builder
public class FlinkTest1 {

    /**
     * Runs a minimal local Flink streaming job: takes two hard-coded elements,
     * upper-cases them, and prints the result to stdout.
     *
     * @param args CLI-style arguments ("--key value" pairs), parsed via ParameterTool
     * @throws Exception if the Flink job fails to execute
     */
    public void start(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
        ParameterTool params = ParameterTool.fromArgs(args);
        // ParameterTool has no bean-style getters, so JSON.toJSON(params) serializes
        // to nothing useful; print the underlying map instead (same approach as
        // FlinkDemoTest.main).
        System.out.println(JSON.toJSONString(params.toMap()));
        env.setParallelism(1);
        // 构建测试数据 (build test input)
        DataStream<String> dataStream = env.fromElements("Hello", "World");
        // 执行测试操作 (apply the transformation under test)
        DataStream<String> resultStream = dataStream.map(String::toUpperCase);
        // 验证测试结果 (print results for manual verification)
        resultStream.print();
        env.execute();
    }
}
3.2 FlinkDemoTest.class
package demo.test;
import cn.hutool.core.io.FileUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.java.utils.ParameterTool;
import org.junit.Test;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
public class FlinkDemoTest {

    /** JUnit entry point: turns prop.json into CLI args and runs the demo job. */
    @Test
    public void FlinkTest1() throws Exception {
        String[] args = readFile("prop.json");
        FlinkTest1.builder().build().start(args);
    }

    /**
     * Loads a JSON properties file from the classpath and flattens it into
     * ParameterTool-style CLI arguments: {"k":"v"} becomes ["--k", "v"].
     *
     * Made static so it is shared by the JUnit test and main() — the original
     * duplicated this whole loop in main().
     *
     * @param propFileName classpath resource name, e.g. "prop.json"
     * @return alternating "--key" / value argument array
     * @throws Exception if the resource is missing or cannot be read
     */
    private static String[] readFile(String propFileName) throws Exception {
        // toURI() rather than getPath(): URL.getPath() returns a percent-encoded
        // string and breaks for paths containing spaces or non-ASCII characters.
        File file = new File(Objects.requireNonNull(
                FlinkDemoTest.class.getClassLoader().getResource(propFileName)).toURI());
        JSONObject jsonObject = JSON.parseObject(FileUtil.readString(file, "UTF-8"));
        List<String> sList = new ArrayList<>();
        for (String key : jsonObject.keySet()) {
            sList.add("--" + key);
            sList.add(jsonObject.getString(key));
        }
        return sList.toArray(new String[0]);
    }

    /** Standalone check: parse prop.json into args and echo the parameter map. */
    public static void main(String[] args) throws Exception {
        String[] result = readFile("prop.json");
        ParameterTool params = ParameterTool.fromArgs(result);
        System.out.println(JSON.toJSONString(params.toMap()));
    }
}