// DistributeCacheTest.java
package com.xxxxx.flink.batch;
import org.apache.commons.io.FileUtils;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.MapOperator;
import org.apache.flink.configuration.Configuration;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
 * Demonstrates Flink's distributed cache: a local file is registered with the
 * execution environment under a symbolic name ("myfile") and is then retrieved
 * inside a {@link RichMapFunction#open} callback on the task side.
 *
 * <p>Expects a readable text file at {@code <working-dir>/data/textfile}.
 */
public class DistributeCacheTest {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment environment = ExecutionEnvironment.getExecutionEnvironment();

        // Register the local file under the symbolic name "myfile"; Flink ships
        // it to each TaskManager before the job runs.
        String projectPath = System.getProperty("user.dir");
        environment.registerCachedFile("file:///" + projectPath + "/data/textfile", "myfile");

        DataSource<String> elements = environment.fromElements("hadoop", "flink", "spark", "hbase");
        MapOperator<String, String> map = elements.map(new RichMapFunction<String, String>() {
            /**
             * Runs once per task before any {@link #map} call: fetches the cached
             * file from the runtime context and prints its lines.
             */
            @Override
            public void open(Configuration parameters) throws Exception {
                File myfile = getRuntimeContext().getDistributedCache().getFile("myfile");
                // Explicit charset: the no-charset overload is deprecated and
                // silently depends on the JVM's platform default encoding.
                List<String> list = FileUtils.readLines(myfile, StandardCharsets.UTF_8);
                for (String line : list) {
                    System.out.println("[" + line + "]");
                }
            }

            /** Identity map — the transformation itself is not the point of this demo. */
            @Override
            public String map(String value) throws Exception {
                return value;
            }
        });

        // print() triggers job execution (it is an action, not just a sink).
        map.print();
    }
}