一个简单的 Kafka 写入 Elasticsearch 的 demo

3 篇文章 0 订阅

listener




import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.csoft.kafkademo.kafkademo.entity.Kafka1Message;
import org.apache.http.HttpHost;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;


/**
 * @author: zhangxirui
 * @date: 2021-02-07 9:27
 * @version: 1.0
 */
@Component
public class KafkaListenerr {

    /**
     * Shared Elasticsearch high-level REST client.
     * NOTE(review): "ip" is a placeholder host — replace with the real
     * Elasticsearch address/port before running.
     */
    static final RestHighLevelClient restHighLevelClient =
            new RestHighLevelClient(
                    RestClient.builder(
                            new HttpHost("ip", 9500)
                    )
            );

    /**
     * Consumes records from {@code my-message-topic}. Each record value is
     * expected to be a JSON array of {@link Kafka1Message}; the batch is
     * written into an Elasticsearch index named {@code <topic>_index},
     * creating the index (with mapping) on first use.
     *
     * @param record the raw Kafka record; value must be a JSON string
     * @throws IOException if any Elasticsearch call fails
     */
    @KafkaListener(topics = {"my-message-topic"})
    public void listen(ConsumerRecord<?, ?> record) throws IOException, ClassNotFoundException {
        Optional<?> kafkaMessage = Optional.ofNullable(record.value());
        if (kafkaMessage.isPresent()) {
            System.out.println("消息接收成功");
            String message = (String) kafkaMessage.get();
            System.out.println(message);
            List<Kafka1Message> kafka1Message = JSON.parseObject(message, new TypeReference<List<Kafka1Message>>() {
            });
            String topic = record.topic() + "_index";

            // Create the index on first use, then index the batch.
            // NOTE(review): exists-then-create is racy with concurrent
            // consumers; tolerate "already exists" failures from create().
            if (!ifIndexExist(topic)) {
                createIndex(topic, kafka1Message);
            }
            intoIndexValue(topic, kafka1Message);
        }
    }

    /**
     * @param topic index name to probe
     * @return whether the index already exists in Elasticsearch
     * @throws IOException on transport failure
     */
    private boolean ifIndexExist(String topic) throws IOException {
        GetIndexRequest getIndexRequest = new GetIndexRequest(topic);
        return restHighLevelClient.indices().exists(getIndexRequest, RequestOptions.DEFAULT);
    }

    /**
     * Indexes the whole batch with a single bulk request.
     *
     * @param topic         target index name
     * @param kafka1Message messages to index (no-op when empty)
     * @throws IOException on Elasticsearch failure
     */
    private void intoIndexValue(String topic, List<Kafka1Message> kafka1Message) throws IOException {
        // BUGFIX: the original built and executed one BulkRequest PER message,
        // which defeats the purpose of bulk indexing. Accumulate all documents
        // into a single request and send it once.
        BulkRequest bulkRequest = new BulkRequest();

        for (Kafka1Message message : kafka1Message) {
            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder();
            xContentBuilder.startObject();
            {
                xContentBuilder.field("hobby", message.getHobby());
                xContentBuilder.field("name", message.getName());
                xContentBuilder.field("sex", message.getSex());
            }
            xContentBuilder.endObject();

            IndexRequest indexRequest = new IndexRequest(topic);
            indexRequest.source(xContentBuilder);
            bulkRequest.add(indexRequest);
        }

        // Avoid an empty bulk call (Elasticsearch rejects zero-action bulks).
        if (bulkRequest.numberOfActions() == 0) {
            return;
        }

        BulkResponse bulkResponse = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);

        for (BulkItemResponse bulkItemResponse : bulkResponse) {
            // BUGFIX: getResponse() is null for failed items — the original
            // unconditional cast to IndexResponse would NPE on any failure.
            if (bulkItemResponse.isFailed()) {
                System.out.println("批量新增失败: " + bulkItemResponse.getFailureMessage());
                continue;
            }
            DocWriteResponse response = bulkItemResponse.getResponse();
            IndexResponse indexResponse = (IndexResponse) response;
            // BUGFIX: was "批量新增成功,{}" + x — an SLF4J placeholder misused in
            // string concatenation, producing a literal "{}" in the output.
            System.out.println("批量新增成功: " + indexResponse.toString());
        }
    }

    /**
     * Creates the index with 2 shards / 0 replicas and a mapping declaring
     * hobby/name/sex as {@code text} fields.
     *
     * @param topic         index name to create
     * @param kafka1Message batch that triggered creation (currently unused)
     * @throws IOException on Elasticsearch failure
     */
    private void createIndex(String topic, List<Kafka1Message> kafka1Message) throws IOException, ClassNotFoundException {
        CreateIndexRequest createIndexRequest = new CreateIndexRequest(topic);

        createIndexRequest.settings(Settings.builder()
                // number_of_shards: data shard count (default 5; 2 for this demo)
                .put("index.number_of_shards", "2")
                // number_of_replicas: 0 is appropriate for a single-node cluster
                .put("index.number_of_replicas", "0")
        );

        // Build the index mapping: every field is a plain "text" field.
        XContentBuilder xContentBuilder = XContentFactory.jsonBuilder();
        xContentBuilder.startObject();
        {
            xContentBuilder.startObject("properties");
            {
                Map<String, String> typeMap = new HashMap<>();
                typeMap.put("type", "text");
                xContentBuilder.field("hobby", typeMap);
                xContentBuilder.field("name", typeMap);
                xContentBuilder.field("sex", typeMap);
            }
            xContentBuilder.endObject();
        }
        xContentBuilder.endObject();

        createIndexRequest.mapping(xContentBuilder);

        restHighLevelClient.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        System.out.println("创建成功");
    }
}

provider




import com.alibaba.fastjson.JSON;
import com.csoft.kafkademo.kafkademo.entity.Kafka1Message;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;


/**
 * @author: zhangxirui
 * @date: 2021-02-07 9:27
 * @version: 1.0
 */
@Component
@EnableScheduling
public class KafkaProvder {

    /**
     * BUGFIX: the original declared the raw type {@code KafkaTemplate},
     * producing an unchecked call at send(). The app configures String
     * serializers for both key and value, so parameterize accordingly.
     */
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Demo producer: every second, serializes a fixed two-element
     * {@link Kafka1Message} list to JSON and sends it to
     * {@code my-message-topic} with key {@code "hello"}.
     */
    @Scheduled(cron = "* * * * * ?")
    public void producerr() {
        List<Kafka1Message> messages = new ArrayList<>();

        Kafka1Message kafka1Message = new Kafka1Message();
        kafka1Message.setName("jack");
        kafka1Message.setHobby("打游戏");
        kafka1Message.setSex("男");

        Kafka1Message kafka2Message = new Kafka1Message();
        kafka2Message.setName("ketty");
        kafka2Message.setHobby("画画");
        kafka2Message.setSex("女");

        messages.add(kafka2Message);
        messages.add(kafka1Message);

        String s = JSON.toJSONString(messages);

        kafkaTemplate.send(new ProducerRecord<String, String>("my-message-topic", "hello", s));
        System.out.println("发送成功");
    }
}

entity


import lombok.Data;

/**
 * @author: zhangxirui
 * @date: 2021-02-08 16:19
 * @version: 1.0
 */
/**
 * Payload carried through Kafka and indexed into Elasticsearch.
 * Lombok {@code @Data} generates the getters/setters used by the
 * listener ({@code getName()} etc.) and producer ({@code setName()} etc.),
 * plus {@code equals}/{@code hashCode}/{@code toString}.
 */
@Data
public class Kafka1Message {
    // Person's name; indexed as a "text" field.
    private String name;
    // Person's hobby; indexed as a "text" field.
    private String hobby;
    // Person's sex; indexed as a "text" field.
    private String sex;
}

配置文件
# Spring Boot config for the Kafka -> Elasticsearch demo.
server:
  port: 8080

spring:
  kafka:
    # NOTE(review): "ip" is a placeholder — set the real broker address.
    bootstrap-servers: ip:9092
    consumer:
      # Consumer group for the @KafkaListener; deserializers fall back to
      # Spring Boot's defaults (StringDeserializer) — TODO confirm intended.
      group-id: test-consumer
    producer:
      # 16 KB batch size, 32 MB buffer; both the demo's keys and values are
      # plain strings, hence String serializers on both sides.
      batch-size: 16384
      buffer-memory: 33554432
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      # retries: 0 means failed sends are not retried (at-most-once from the
      # producer's perspective).
      retries: 0
      value-serializer: org.apache.kafka.common.serialization.StringSerializer

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值