Kafka 数据传输应用配置的简单示例(DEMO)

package com.test.shop.service.impl;


import com.google.gson.Gson;
import com.test.shop.service.DataCoreService;
import com.test.shop.service.SendScheduService;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;


/**
 * Dispatch-system message sender.
 *
 * <p>Loads order rows for a given serial number, normalizes their
 * {@code createTime} to epoch milliseconds, and publishes them as JSON to
 * Kafka topics derived from each row's branch id
 * ({@code order_<branchId>} for machine mode, {@code order_<branchId>_1}
 * for manual mode).
 */
@Service
public class SendScheduerviceImpl implements SendScheduService {

    @Autowired
    private DataCoreService dataCoreService;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /** Default message key agreed with the downstream consumer. */
    private final String key001 = "test_KEY_001";

    /** Gson is thread-safe; reuse one instance instead of allocating per send. */
    private final Gson gson = new Gson();


    /**
     * Sends the orders matching {@code _sMap} to the dispatch system.
     *
     * @param _sMap query parameters; {@code creditFlag == "1"} selects the
     *              credit-confirmation query (no transaction-record join),
     *              and each result row is expected to carry {@code mode},
     *              {@code branchId} and {@code createTime}
     * @throws Exception when no order rows are found for the parameters
     */
    public void sendBykafka(Map<String, Object> _sMap) throws Exception {

        // Pick the query template. ("1".equals(null) is false, so the
        // original explicit null check was redundant.)
        String querySql = "B_CORE_DATA_R000014";
        if ("1".equals(_sMap.get("creditFlag"))) {
            // Manager credit confirmation: pre-send query, skips the transaction-record table.
            querySql = "B_CORE_DATA_R000074";
        }

        // Look up order rows (order info + branch id) by serial number.
        List<Map<String, Object>> dataCoreList = dataCoreService.getResultByTCODE(querySql, _sMap);
        // BUG FIX: the original guard was (list != null && list.size() == 0),
        // so a null result slipped through and caused an NPE in the loop below.
        if (dataCoreList == null || dataCoreList.isEmpty()) {
            throw new Exception("订单异常,请联系管理员");
        }

        // BUG FIX: the original appended every row's topic fragment into one
        // StringBuffer, producing a garbage concatenated topic name whenever
        // more than one row was returned. Group rows by their target topic
        // instead and send one message per topic (LinkedHashMap keeps the
        // original row order).
        Map<String, List<Map<String, Object>>> rowsByTopic =
                new LinkedHashMap<String, List<Map<String, Object>>>();

        for (Map<String, Object> row : dataCoreList) {
            Object created = row.get("createTime");
            if (created == null) {
                // Rows without a creation time are silently skipped, as before.
                continue;
            }
            // Normalize to epoch millis so the JSON payload is timezone-free.
            row.put("createTime", ((Date) created).getTime());

            // mode "1" = manual routing: order_<branchId>_1; otherwise machine: order_<branchId>.
            String topic = "1".equals(row.get("mode"))
                    ? "order_" + row.get("branchId") + "_1"
                    : "order_" + row.get("branchId");

            List<Map<String, Object>> bucket = rowsByTopic.get(topic);
            if (bucket == null) {
                bucket = new ArrayList<Map<String, Object>>();
                rowsByTopic.put(topic, bucket);
            }
            bucket.add(row);
        }

        for (Map.Entry<String, List<Map<String, Object>>> entry : rowsByTopic.entrySet()) {
            kafkaTemplate.send(entry.getKey(), key001, gson.toJson(entry.getValue()));
        }
    }


    /**
     * Publishes {@code dataList} as JSON to {@code topic} using the default key.
     *
     * @param topic    destination topic
     * @param dataList rows to serialize and send
     * @throws Exception propagated from the Kafka client
     */
    public void sendBykafka(String topic, List<Map<String, Object>> dataList) throws Exception {
        kafkaTemplate.send(topic, key001, gson.toJson(dataList));
    }


    /**
     * Publishes {@code dataList} as JSON to {@code topic} under {@code key}.
     *
     * @param topic    destination topic
     * @param key      agreed message key; falls back to the default when null
     * @param dataList rows to serialize and send
     * @throws Exception propagated from the Kafka client
     */
    public void sendBykafka(String topic, String key, List<Map<String, Object>> dataList) throws Exception {
        // BUG FIX: the original ignored the caller-supplied key and always sent key001.
        kafkaTemplate.send(topic, key != null ? key : key001, gson.toJson(dataList));
    }

    /**
     * Publishes a pre-serialized JSON payload to {@code topic} using the default key.
     *
     * @param topic    destination topic
     * @param dataJson payload, already serialized
     * @throws Exception propagated from the Kafka client
     */
    public void sendBykafka(String topic, String dataJson) throws Exception {
        kafkaTemplate.send(topic, key001, dataJson);
    }


    /**
     * Publishes a pre-serialized JSON payload to {@code topic} under {@code key}.
     *
     * @param topic    destination topic
     * @param key      agreed message key; falls back to the default when null
     * @param dataJson payload, already serialized
     * @throws Exception propagated from the Kafka client
     */
    public void sendBykafka(String topic, String key, String dataJson) throws Exception {
        // BUG FIX: the original ignored the caller-supplied key and always sent key001.
        kafkaTemplate.send(topic, key != null ? key : key001, dataJson);
    }

}

spring:
  application:
    name: shop
  datasource:
    driver-class-name: com.mysql.jdbc.Driver
    username: root
    password: *****
    # FIX: removed the dangling trailing '&' from the JDBC URL.
    url: jdbc:mysql://IP地址/TEST_restaurant?characterEncoding=utf-8
  redis:
    host:  IP地址
    port:  6379
    password:  *****
  # -------------------- Kafka --------------------
  kafka:
    # bootstrap-servers: ["192.168.**.**:9092","192.168.**.**:9092","192.168.**.**:9092"]
    bootstrap-servers: ["120.77.**.**:9092"]
    # String list bound under the "spring.kafka" group (custom property).
    schema-registry-url:
        - "http://120.77.**.**:18081"
        #- "http://192.168.**.**:18081"
    # Properties below belong to the "spring.kafka.producer" group.
    producer:
        retries: 0
        batch-size: 4096
        buffer-memory: 40960
        enableIdempotence: true # camelCase also binds via Spring's relaxed binding
        max-in-flight-requests-per-connection: 1 # kebab-case form
        linger-ms: 10
        topic: "order_2"
        key-serializer: org.apache.kafka.common.serialization.StringSerializer
        value-serializer: org.apache.kafka.common.serialization.StringSerializer
    # Properties below belong to the "spring.kafka.consumer" group.
    consumer:
        auto-offset-reset: "latest"
        # FIX: the original declared BOTH `groupId: "record-service"` and
        # `group-id: test`; relaxed binding maps both to the same property,
        # making the effective group id ambiguous. Kept a single key —
        # confirm which group id ("record-service" or "test") is intended.
        group-id: test
        topics: ["order_2"]
        session-timeout: 6000
        enable-auto-commit: true
        auto-commit-interval: 1000
        concurrency: 10
        key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
        value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
eureka:
  client:
    service-url:
      defaultZone: http://120.77.**.**:8761/eureka/
server:
  port: 8084
ribbon:  
  ReadTimeout: 60000  
  ConnectTimeout: 60000
mybatis:
  mapper-locations: classpath:com/TEST/shop/mapper/*.xml
  # FIX: the MyBatis starter property is singular "config-location",
  # not "config-locations".
  config-location: classpath:mybatis/mybatis-config.xml
		<!-- Kafka -->
		<!-- Generates configuration metadata for IDE completion of custom
		     @ConfigurationProperties; marked optional so it is not passed on
		     to consumers of this artifact. -->
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-configuration-processor</artifactId>
			<optional>true</optional>
		</dependency>
		<!-- Spring for Apache Kafka (KafkaTemplate, @KafkaListener). No version
		     given — presumably managed by the Spring Boot parent/BOM; confirm. -->
		<dependency>
			<groupId>org.springframework.kafka</groupId>
			<artifactId>spring-kafka</artifactId>
		</dependency>

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 打赏
    打赏
  • 2
    评论
Java中使用Kafka进行数据传输需要进行以下步骤: 1. 配置Kafka:首先,您需要在项目中添加Kafka的依赖项。在Maven项目中,您可以在pom.xml文件中添加以下依赖项: ```xml <dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka-clients</artifactId> <version>2.8.0</version> </dependency> ``` 2. 创建生产者:使用Kafka提供的Producer API,您可以创建一个生产者实例并配置所需的属性。例如,设置Kafka集群的地址、序列化器等。 ```java import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.KafkaProducer; import java.util.Properties; public class KafkaProducerExample { public static void main(String[] args) { // Kafka配置 Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); // 创建生产者实例 Producer<String, String> producer = new KafkaProducer<>(props); // 发送消息 String topic = "my-topic"; String key = "key1"; String value = "Hello, Kafka!"; ProducerRecord<String, String> record = new ProducerRecord<>(topic, key, value); producer.send(record); // 关闭生产者 producer.close(); } } ``` 3. 
创建消费者:使用Kafka提供的Consumer API,您可以创建一个消费者实例并配置所需的属性。例如,设置Kafka集群的地址、反序列化器等。 ```java import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import java.util.Arrays; import java.util.Properties; public class KafkaConsumerExample { public static void main(String[] args) { // Kafka配置 Properties props = new Properties(); props.put("bootstrap.servers", "localhost:9092"); props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.put("group.id", "my-group"); // 创建消费者实例 Consumer<String, String> consumer = new KafkaConsumer<>(props); // 订阅主题 String topic = "my-topic"; consumer.subscribe(Arrays.asList(topic)); // 消费消息 while (true) { ConsumerRecords<String, String> records = consumer.poll(100); for (ConsumerRecord<String, String> record : records) { System.out.println("Received message: key=" + record.key() + ", value=" + record.value()); } } // 关闭消费者 consumer.close(); } } ``` 通过上述代码示例,您可以在Java中使用Kafka进行数据传输。生产者将消息发送到指定的主题,而消费者订阅该主题并从中接收消息。您可以根据需要进行进一步的配置和处理。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

兰舟轻帆

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值