Flink: reading from Kafka and writing to Elasticsearch
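The job below consumes string messages from a Kafka topic with FlinkKafkaConsumer and indexes each message into Elasticsearch 6 as a {"message": ...} document through the elasticsearch6 connector's ElasticsearchSink. Checkpointing is enabled so that Kafka offsets are committed in step with the sink.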

package com.zjl.flink;

import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class KafkaToEsJob {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Checkpoint every 500 ms so Kafka offsets are committed consistently with the sink.
        env.enableCheckpointing(500);

        // Kafka source: consume string messages from the "zjl" topic, starting from the latest offset.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "192.168.70.86:9092");
        properties.setProperty("group.id", "zjl");
        DeserializationSchema<String> deserializationSchema = new SimpleStringSchema();
        String topic = "zjl";
        FlinkKafkaConsumer<String> consumer = new FlinkKafkaConsumer<>(topic, deserializationSchema, properties);
        consumer.setStartFromLatest();
        DataStreamSource<String> stream = env.addSource(consumer);

        // Elasticsearch sink: wrap each message in a {"message": ...} document
        // and index it into the "flink" index.
        List<HttpHost> esServers = new ArrayList<>();
        esServers.add(new HttpHost("192.168.70.9", 9200, "http"));
        ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<>(
                esServers,
                new ElasticsearchSinkFunction<String>() {
                    @Override
                    public void process(String s, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
                        Map<String, String> json = new HashMap<>();
                        json.put("message", s);
                        System.out.println(json); // debug: log each document before indexing
                        IndexRequest indexRequest = Requests.indexRequest()
                                .index("flink")
                                .type("_doc")
                                .source(json);
                        requestIndexer.add(indexRequest);
                    }
                }
        );
        // Flush after every single action so records show up immediately.
        // This is a demo setting; raise it in production to batch bulk requests.
        esSinkBuilder.setBulkFlushMaxActions(1);
        esSinkBuilder.setBulkFlushMaxSizeMb(500);
        esSinkBuilder.setBulkFlushInterval(5000);
        stream.addSink(esSinkBuilder.build());

        // stream.print();
        env.execute("Flink-Kafka demo");
    }
}
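To compile the job, the Kafka and Elasticsearch 6 connectors must be on the classpath. Below is a minimal sketch of the Maven dependencies, assuming Flink 1.10.0 built against Scala 2.11; the exact version and Scala suffix are assumptions and should be matched to your cluster:

<!-- Assumed versions: Flink 1.10.0 / Scala 2.11; adjust to your environment. -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-java_2.11</artifactId>
    <version>1.10.0</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-kafka_2.11</artifactId>
    <version>1.10.0</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-elasticsearch6_2.11</artifactId>
    <version>1.10.0</version>
</dependency>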
