package com.zjl.flink;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import java.util.*;
public class kafka {

    /**
     * Flink job: consumes string records from Kafka topic {@code "zjl"} and indexes
     * each record into Elasticsearch index {@code "flink"} as a
     * {@code {"message": <record>}} document.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 500 ms so Kafka offsets are committed and the job can recover.
        env.enableCheckpointing(500);

        DataStreamSource<String> stream = env.addSource(createKafkaSource());
        stream.addSink(createEsSink());

        env.execute("Flink-kafka demo");
    }

    /** Builds a Kafka consumer for topic "zjl" that starts reading from the latest offsets. */
    private static FlinkKafkaConsumer<String> createKafkaSource() {
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.70.86:9092");
        properties.setProperty("group.id", "zjl");
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("zjl", new SimpleStringSchema(), properties);
        consumer.setStartFromLatest();
        return consumer;
    }

    /** Builds an Elasticsearch sink writing each record to index "flink", type "_doc". */
    private static ElasticsearchSink<String> createEsSink() {
        List<HttpHost> esServers = new ArrayList<>();
        esServers.add(new HttpHost("192.168.70.9", 9200, "http"));

        ElasticsearchSink.Builder<String> builder = new ElasticsearchSink.Builder<>(
                esServers,
                new ElasticsearchSinkFunction<String>() {
                    @Override
                    public void process(String record, RuntimeContext runtimeContext,
                                        RequestIndexer requestIndexer) {
                        Map<String, String> json = new HashMap<>();
                        json.put("message", record);
                        // NOTE(review): removed the per-record System.out.println(json)
                        // debug print — it ran on every element in the sink hot path and
                        // spammed stdout; use the connector's logging instead if needed.
                        IndexRequest indexRequest = Requests.indexRequest()
                                .index("flink")
                                .type("_doc")
                                .source(json);
                        requestIndexer.add(indexRequest);
                    }
                }
        );
        // Flush after every single action (demo-friendly, low throughput); the size and
        // interval limits below are effectively fallbacks given maxActions == 1.
        builder.setBulkFlushMaxActions(1);
        builder.setBulkFlushMaxSizeMb(500);
        builder.setBulkFlushInterval(5000);
        return builder.build();
    }
}
// Flink reads from Kafka and writes to Elasticsearch (flink读取kafka写入es)
// (blog-scrape residue, kept as a comment so the file compiles: originally
//  "最新推荐文章于 2023-04-25 15:49:16 发布" — "latest recommended article published 2023-04-25 15:49:16")