Persisting Data Received from Kafka into Elasticsearch

Dependencies

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.3.6.RELEASE</version>
        <relativePath/>
    </parent>

    <groupId>org.example</groupId>
    <artifactId>ElasticSearchDemo</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-devtools</artifactId>
            <scope>runtime</scope>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-test</artifactId>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-test</artifactId>
        </dependency>
        <dependency>
            <groupId>co.elastic.clients</groupId>
            <artifactId>elasticsearch-java</artifactId>
            <version>7.15.2</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
<version>2.13.0</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
            <version>2.13.0</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-annotations</artifactId>
            <version>2.13.0</version>
        </dependency>
    </dependencies>
</project>

application.properties file (Elasticsearch connection settings)

# Elasticsearch host
elasticsearch.host=127.0.0.1
# Elasticsearch port
elasticsearch.port=9200
# Elasticsearch credentials
spring.elasticsearch.rest.username=123456
spring.elasticsearch.rest.password=123456
# Enable debug logging for the application package
logging.level.com.atguigu.es=debug

application.yml file (the Kafka settings below are YAML and belong in application.yml, not application.properties)

server:
  port: 18082
spring:
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    consumer:
      group-id: mentugroup          # default consumer group ID
      enable-auto-commit: true      # whether to auto-commit offsets
      auto-commit-interval: 100     # offset commit delay (ms after a record is received)
      # earliest: resume from the committed offset if one exists; otherwise consume from the beginning of the partition
      # latest:   resume from the committed offset if one exists; otherwise consume only newly produced records
      # none:     resume from committed offsets when every partition has one; throw an exception if any partition lacks one
      auto-offset-reset: latest
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer

Main class

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class KafkaConsumerApplication {

    public static void main(String[] args) {
        SpringApplication.run(KafkaConsumerApplication.class, args);
    }
}

Entity class

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.DateFormat;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

@Data
@NoArgsConstructor
@AllArgsConstructor
@ToString
@Document(indexName = "alarm", shards = 5, replicas = 1)
public class Alarm {
    // An id is required; it is the globally unique key mapped to "_id" in Elasticsearch
    @Id
    private Long id;

    /**
     * type     : field data type
     * analyzer : analyzer to apply
     * index    : whether the field is indexed (default: true)
     * Keyword  : exact value, not analyzed
     */
    @Field(type = FieldType.Text)
    private String PRO_A;

    @Field(type = FieldType.Text)
    private String PW;

    @Field(type = FieldType.Date, format = DateFormat.custom,pattern = "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_second")
    private String SESSION_TIME;

    @Field(type = FieldType.Text)
    private String SUB_TYPE;

    @Field(type = FieldType.Text)
    private String R_ID;

    @Field(type = FieldType.Text)
    private String D_COUNTRY;

    @Field(type = FieldType.Text)
    private String TYPE;

    @Field(type = FieldType.Text)
    private String OP;

    @Field(type = FieldType.Text)
    private String S_PROVINCE;

    @Field(type = FieldType.Ip)
    private String D_IP;

    @Field(type = FieldType.Text)
    private String D_ORGANIZATION;

    @Field(type = FieldType.Text)
    private String D_CITY;

    @Field(type = FieldType.Text)
    private String USR_NE;

    @Field(type = FieldType.Text)
    private String ATK_RESULT;

    @Field(type = FieldType.Text)
    private String S_COUNTRY;

    @Field(type = FieldType.Text)
    private String S_CITY;

    @Field(type = FieldType.Text)
    private String S_DISTRICT;

    @Field(type = FieldType.Text)
    private String D_DISTRICT;

    @Field(type = FieldType.Integer)
    private String S_PORT;

    @Field(type = FieldType.Text)
    private String PRO_T;

    @Field(type = FieldType.Text)
    private String D_SCENES;

    @Field(type = FieldType.Text)
    private String ATK_DES;

    @Field(type = FieldType.Ip)
    private String S_IP;

    @Field(type = FieldType.Text)
    private String S_ORGANIZATION;

    @Field(type = FieldType.Integer)
    private String D_PORT;

    @Field(type = FieldType.Text)
    private String OP_RESULT;

    @Field(type = FieldType.Text)
    private String S_SCENES;

    @Field(type = FieldType.Text)
    private String D_PROVINCE;

    @Field(type = FieldType.Text)
    private String OUT_DATA;

    @Field(type = FieldType.Text)
    private String SESSION_ID;

    @Field(type = FieldType.Double)
    private Double TASK_ID;
}
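
For reference, a minimal sketch of how a Kafka payload maps onto this entity through fastjson (the keys mirror the Alarm field names). The sample values and the AlarmJsonDemo class are made up for illustration:

import com.alibaba.fastjson.JSONObject;

public class AlarmJsonDemo {

    public static void main(String[] args) {
        // Hypothetical sample payload; keys mirror the Alarm field names
        String message = "{\"id\":1,\"S_IP\":\"10.0.0.1\",\"D_IP\":\"10.0.0.2\","
                + "\"SESSION_TIME\":\"2021-01-01 12:00:00\",\"TYPE\":\"scan\"}";

        // fastjson binds the JSON keys onto the Alarm properties
        Alarm alarm = JSONObject.parseObject(message, Alarm.class);
        System.out.println(alarm);
    }
}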

Elasticsearch configuration class

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.elasticsearch.config.AbstractElasticsearchConfiguration;

@Configuration
public class ElasticsearchConfig extends AbstractElasticsearchConfiguration {

    @Value("${elasticsearch.host}")
    private String host;
    @Value("${elasticsearch.port}")
    private Integer port;
    @Value("${spring.elasticsearch.rest.username}")
    private String username;
    @Value("${spring.elasticsearch.rest.password}")
    private String password;

    // Override the parent factory method to build the REST client
    @Override
    public RestHighLevelClient elasticsearchClient() {
        RestClientBuilder builder = RestClient.builder(new HttpHost(host, port));
        // Uncomment to enable basic authentication with the configured credentials
//        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
//        credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password));
//        builder.setHttpClientConfigCallback(f -> f.setDefaultCredentialsProvider(credentialsProvider));
        return new RestHighLevelClient(builder);
    }
}
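
To sanity-check the connection at startup, the client can be pinged. A minimal sketch, assuming the RestHighLevelClient bean above is in the context; the EsPingCheck class is hypothetical:

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class EsPingCheck {

    // Runs once at startup: ping the cluster and print whether it responded
    @Bean
    public CommandLineRunner esPing(RestHighLevelClient client) {
        return args -> System.out.println("Elasticsearch reachable: " + client.ping(RequestOptions.DEFAULT));
    }
}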

Dao

import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;
import org.springframework.stereotype.Repository;

@Repository
public interface AlarmDao extends ElasticsearchRepository<Alarm, Long> {
}
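
The repository inherits save, findById, count, and the other CRUD operations used below. A minimal usage sketch, assuming a running cluster; the AlarmDaoTest class is illustrative, not part of the original code:

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class AlarmDaoTest {

    @Autowired
    private AlarmDao alarmDao;

    @Test
    public void saveAndFetch() {
        Alarm alarm = new Alarm();
        alarm.setId(1L);              // setters come from lombok's @Data
        alarmDao.save(alarm);         // index the document under _id = 1

        // read it back by _id and print the document count
        alarmDao.findById(1L).ifPresent(System.out::println);
        System.out.println("docs in index: " + alarmDao.count());
    }
}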

Creating the index

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.test.context.junit4.SpringRunner;

@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringDataESIndexTest {
    // Inject the ElasticsearchRestTemplate
    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    // Create the index and its mapping
    @Test
    public void createIndex() {
        // Spring Data creates the index from the @Document/@Field mapping when the context starts,
        // so bootstrapping the application context is enough here
        System.out.println("index created");
    }
}
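
To create the index explicitly instead of relying on startup auto-creation, Spring Data's IndexOperations can be used. A sketch, assuming Spring Data Elasticsearch 4.x (the version this Boot parent manages); it would live in the test class above:

import org.springframework.data.elasticsearch.core.IndexOperations;

    @Test
    public void createIndexExplicitly() {
        IndexOperations indexOps = elasticsearchRestTemplate.indexOps(Alarm.class);
        if (!indexOps.exists()) {
            indexOps.create();                              // create the index with the @Document settings
            indexOps.putMapping(indexOps.createMapping());  // apply the @Field mapping
        }
    }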

Receiving from Kafka and persisting to Elasticsearch

import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * Receives records from Kafka and persists them to Elasticsearch
 */
@Component
public class MessageListener {

    @Autowired
    private AlarmDao alarmDao;

    // groupId on the annotation overrides the default group from application.yml
    @KafkaListener(topics = {"demo"}, groupId = "itmentuGroup")
    public void listener(ConsumerRecord<String, String> record) {
        // Pull the message off the Kafka record
        if (record != null) {
            String message = record.value();

            // Deserialize the JSON message into the entity
            Alarm alarm = JSONObject.parseObject(message, Alarm.class);
            System.out.println(alarm);

            // Persist to Elasticsearch
            alarmDao.save(alarm);

            long offset = record.offset();
            System.out.println("message read: " + message + "\ncurrent offset: " + offset);
        }
    }
}
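
To exercise the pipeline end to end, publish a message to the demo topic. A minimal sketch using the auto-configured KafkaTemplate; the TestProducerController endpoint and the payload are made up for illustration:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class TestProducerController {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // GET /send publishes one sample alarm; the listener above consumes and indexes it
    @GetMapping("/send")
    public String send() {
        kafkaTemplate.send("demo", "{\"id\":1,\"S_IP\":\"10.0.0.1\",\"TYPE\":\"scan\"}");
        return "sent";
    }
}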