一、多线程开发生产者类
package com.ldy.bootv2.demo.jms;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.configurationprocessor.json.JSONException;
import org.springframework.boot.configurationprocessor.json.JSONObject;
public class JProducerThread extends Thread {

    // Logger for this class. BUG FIX: the original obtained a logger for
    // JProducer.class, so log records were attributed to the wrong class.
    private static final Logger LOG = LoggerFactory.getLogger(JProducerThread.class);
    // Maximum number of producer threads (size of the fixed thread pool).
    private static final int MAX_THREAD_SIZE = 6;

    /**
     * Builds the Kafka producer client configuration.
     *
     * @return producer {@link Properties}: bootstrap servers, acks, batching
     *         tuning, and String key/value serializers
     */
    public Properties configure() {
        Properties props = new Properties();
        // Kafka cluster address.
        props.put("bootstrap.servers", "localhost:9092");
        // Wait for the leader's ack only.
        props.put("acks", "1");
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        // BUG FIX: a producer requires SERIALIZERS; the original configured
        // StringDeserializer, which makes KafkaProducer construction fail.
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        return props;
    }

    /**
     * Entry point: runs MAX_THREAD_SIZE producer tasks on a fixed thread pool.
     */
    public static void main(String[] args) {
        // Create a fixed-size thread pool.
        ExecutorService executorService = Executors.newFixedThreadPool(MAX_THREAD_SIZE);
        // BUG FIX: the original started an unrelated JProducer thread and then
        // submitted only ONE task to a 6-thread pool; fill the pool instead.
        for (int i = 0; i < MAX_THREAD_SIZE; i++) {
            executorService.submit(new JProducerThread());
        }
        // Shut down the pool: no new tasks accepted, submitted tasks still run.
        executorService.shutdown();
    }

    /**
     * Single-threaded producer task: sends 100 JSON messages to topic
     * "test_kafka_game_x", logging each broker ack via an async callback.
     */
    @Override
    public void run() {
        Producer<String, String> producer = new KafkaProducer<>(configure());
        try {
            // Send 100 JSON messages.
            for (int i = 0; i < 100; i++) {
                JSONObject json = new JSONObject();
                try {
                    json.put("id", i);
                    json.put("ip", "192.168.0." + i);
                    json.put("date", new Date().toString());
                } catch (JSONException e1) {
                    // Log instead of printStackTrace, and keep the stack trace.
                    LOG.error("Build json error,msg is " + e1.getMessage(), e1);
                }
                String k = "key" + i;
                // Asynchronous send; the callback fires when the broker acks.
                producer.send(new ProducerRecord<String, String>("test_kafka_game_x", k,
                        json.toString()), new Callback() {
                    public void onCompletion(RecordMetadata metadata, Exception e) {
                        if (e != null) {
                            LOG.error("Send error,msg is " + e.getMessage());
                        } else {
                            LOG.info("The offset of the record we just sent is :" + metadata.offset());
                        }
                    }
                });
            }
            try {
                // Give in-flight async sends time to complete before closing.
                sleep(3000);
            } catch (InterruptedException e) {
                // BUG FIX: restore the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
                LOG.error("Interrupted thread error,msg is " + e.getMessage());
            }
        } finally {
            // BUG FIX: close in finally so the client is released even if
            // building or sending a record throws.
            producer.close();
        }
    }
}
二、多线程开发消费者类
package com.ldy.bootv2.demo.jms;
import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
public class JConsumerSubscribe extends Thread {

    /** Entry point: starts one consumer thread. */
    public static void main(String[] args) {
        JConsumerSubscribe jconsumer = new JConsumerSubscribe();
        jconsumer.start();
    }

    /**
     * Builds the Kafka consumer client configuration.
     *
     * @return consumer {@link Properties}: bootstrap servers, group id,
     *         auto-commit settings, and String key/value deserializers
     */
    private Properties configure() {
        Properties props = new Properties();
        // Kafka cluster address.
        props.put("bootstrap.servers", "localhost:9092");
        // Consumer group id.
        props.put("group.id", "ke");
        // Commit offsets automatically, once per second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return props;
    }

    /**
     * Continuously polls the topic and prints every record received.
     * Stops (and closes the consumer) when the thread is interrupted.
     */
    @Override
    public void run() {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(configure());
        try {
            // CONSISTENCY FIX: subscribe to the topic the companion producer
            // writes to; the original subscribed to an unrelated "test" topic.
            consumer.subscribe(Arrays.asList("test_kafka_game_x"));
            // Flag controlling the continuous consume loop.
            boolean flag = true;
            while (flag) {
                // Fetch the next batch of records (blocks up to 10 seconds).
                ConsumerRecords<String, String> records = consumer.poll(10000);
                for (ConsumerRecord<String, String> record : records) {
                    // Print each message.
                    System.out.printf("offset= %d ,key = %s,value=%s%n",
                            record.offset(), record.key(), record.value());
                    try {
                        sleep(1000); // throttle output for the demo
                    } catch (InterruptedException e) {
                        // BUG FIX: restore interrupt status and exit the loop;
                        // the original swallowed the interrupt and looped forever.
                        Thread.currentThread().interrupt();
                        flag = false;
                        break;
                    }
                }
            }
        } finally {
            // BUG FIX: the original close() sat after an infinite loop and was
            // unreachable; finally guarantees the consumer is released.
            consumer.close();
        }
    }
}