# 情景
我在使用 Flink 集成 Elasticsearch 的时候遇到了如下问题。
# 问题
NoSuchMethodError: org.elasticsearch.client.RestHighLevelClient.ping([Lorg/apache/http/Header;)Z
# 现象
启动报错,flink无法提交job
# 原因
主要原因是 Maven 依赖版本不匹配:classpath 上实际加载的 Elasticsearch 客户端(RestHighLevelClient)版本,与 Flink elasticsearch connector 编译时所依赖的客户端版本不一致,导致运行时找不到 `ping(Header...)` 这个方法签名,从而抛出 NoSuchMethodError。
# 解决
<!-- kafka-flink-source/sink连接器 -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka-0.11_2.12</artifactId>
<version>1.10.1</version>
</dependency>
<!-- flink -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.10.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.10.1</version>
</dependency>
<!-- elasticsearch connector -->
<!-- 注意:此处 artifactId 的 Scala 后缀是 _2.11,而上面 flink-streaming-java 等依赖使用的是 _2.12。
     同一项目中应统一 Scala 版本后缀,否则可能引入新的类冲突,建议核对后统一为 _2.12 -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-elasticsearch7_2.11</artifactId>
<version>1.11.0</version>
</dependency>
# 重点
关键在于最后一个 Maven 依赖(flink-connector-elasticsearch7)的版本选择:
我这里使用的 Elasticsearch 版本是 7.6.2,Flink 版本是 1.10.1。
# 如下是代码
@SpringBootTest
class DemoApplicationTests {

    /**
     * Consumes string messages from the Kafka topic {@code from-kafka} and
     * forwards each one to Elasticsearch through the Flink Elasticsearch sink,
     * printing the stream as well for debugging.
     *
     * @throws Exception if the Flink job fails to start or execute
     */
    @Test
    void contextLoads() throws Exception {
        // Obtain the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Kafka consumer configuration
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "10.43.80.80:9092");
        DataStream<String> dataStream = env.addSource(
                new FlinkKafkaConsumer011<>("from-kafka", new SimpleStringSchema(), properties)
        );

        // Send the stream to Elasticsearch
        List<HttpHost> httpHosts = List.of(new HttpHost("192.168.43.43", 9200));
        dataStream.addSink(
                new ElasticsearchSink.Builder<>(httpHosts, new ElasticsearchMapper()).build()
        );

        // Print the consumed records for debugging
        dataStream.print();

        // Launch the job
        env.execute();
    }

    /**
     * Maps each incoming string to an Elasticsearch {@link IndexRequest}
     * targeting the {@code flag} index.
     */
    public static class ElasticsearchMapper implements ElasticsearchSinkFunction<String> {
        @Override
        public void process(String s, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
            // Wrap the raw message as the document payload
            Map<String, String> datas = Map.of("data", s);
            // BUG FIX: the original code set a fixed document id ("es"), so every
            // record overwrote the same single document and only the last message
            // survived. Omitting the id lets Elasticsearch auto-generate a unique
            // id per record.
            IndexRequest indexRequest = Requests.indexRequest()
                    .index("flag")
                    .source(datas);
            // Hand the request to the sink's buffering indexer
            requestIndexer.add(indexRequest);
        }
    }
}