Maven pom.xml configuration (properties and Kafka dependencies):
<properties>
<scala.version>2.11</scala.version>
<slf4j.version>1.7.21</slf4j.version>
<kafka.version>2.0.0</kafka.version>
<lombok.version>1.16.18</lombok.version>
<junit.version>4.11</junit.version>
<gson.version>2.2.4</gson.version>
<protobuff.version>1.5.4</protobuff.version>
<spark.version>2.3.1</spark.version>
</properties>
<!-- kafka-clients -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.1.0</version>
</dependency>
<!-- kafka -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.12</artifactId>
<version>2.1.0</version>
</dependency>
First, a Kafka producer:
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
/** */
/**
 * Minimal Kafka producer demo: sends a batch of numbered messages to a test topic.
 */
public class ProducerFastStart {
    /** Kafka broker address; assumes a local single-broker setup. */
    public static final String brokerList = "localhost:9092";
    /** Topic the producer writes to. */
    public static final String topic = "topic-test-2";

    /**
     * Formats a {@link Date} as {@code yyyy-MM-dd HH:mm:ss}.
     *
     * @param date the date to format; may be null
     * @return the formatted timestamp, or "" when {@code date} is null
     */
    public static String formatStringDateTime( Date date ) {
        if ( date == null ) {
            return "";
        }
        // A fresh SimpleDateFormat per call avoids its thread-safety pitfalls.
        SimpleDateFormat df = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss" );
        return df.format( date );
    }

    public static void main( String[] ar ) {
        Properties properties = new Properties();
        properties.put("key.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        // Use the declared constant instead of repeating the literal.
        properties.put( "bootstrap.servers", brokerList );

        // try-with-resources guarantees the producer is closed even if send() throws.
        try ( KafkaProducer<String, String> producer = new KafkaProducer<>( properties ) ) {
            for ( int i = 130; i < 200; i++ ) {
                // BUG FIX: the original built `data` each iteration but sent one
                // pre-built record 70 times; now each per-iteration message is sent.
                String data = "produce kafka message: " + i
                        + " " + formatStringDateTime( new Date() );
                System.out.println( data );
                producer.send( new ProducerRecord<>( topic, data ) );
            }
        }
    }
}
Next, a Kafka consumer group with multithreaded message handling:
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/** */
/**
 * Entry point: starts one {@link KafkaConsumerThread} whose worker pool is
 * sized to the number of available processors.
 */
public class ThirdMultiConsumerThreadDemo {
    public static final String brokerList = "localhost:9092";
    public static final String topic = "topic-test-2"; // e.g. the "orders" topic
    public static final String groupId = "group.order"; // the orders consumer group

    /**
     * Builds the consumer configuration: String key/value deserialization,
     * local broker, fixed group id, auto-commit enabled.
     */
    public static Properties initConfig() {
        Properties config = new Properties();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                StringDeserializer.class.getName());
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                StringDeserializer.class.getName());
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        return config;
    }

    public static void main( String[] ar ) {
        int workerCount = Runtime.getRuntime().availableProcessors();
        KafkaConsumerThread pollingThread =
                new KafkaConsumerThread( initConfig(), topic, workerCount );
        System.out.println( workerCount + " --- 总线程数 " );
        pollingThread.start();
    }
}
// 定义一个 consumer 线程,用多线程处理获取到的消息
// A polling thread: owns one KafkaConsumer (not thread-safe, so confined here)
// and fans each polled batch out to a worker pool for processing.
class KafkaConsumerThread extends Thread {
    // The consumer owned exclusively by this thread.
    private final KafkaConsumer<String, String> consumer;
    // Worker pool. BUG FIX: this was `static` yet assigned in the instance
    // constructor, so a second instance would silently clobber the first's pool.
    private final ExecutorService executorService;
    // Number of worker tasks submitted per polled batch.
    private final int threadNumber;
    // Logical handler names, cycled across the submitted workers.
    private final String[] interfaceNames =
            { "test spring", "test summer", "test autumn", "test winter" };

    public KafkaConsumerThread( Properties props, String topic, int threadNumberInput ) {
        this.threadNumber = threadNumberInput;
        consumer = new KafkaConsumer<>( props );
        consumer.subscribe( Collections.singletonList( topic ) );
        // Bounded queue + CallerRunsPolicy gives back-pressure instead of rejection.
        executorService = new ThreadPoolExecutor( threadNumber, threadNumber,
                0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>( 1000 ),
                new ThreadPoolExecutor.CallerRunsPolicy() );
    }

    @Override
    public void run() {
        try {
            while ( true ) {
                // Pull the next batch of messages (1 s poll timeout).
                ConsumerRecords<String, String> records =
                        consumer.poll( Duration.ofMillis( 1000 ) );
                if ( !records.isEmpty() ) {
                    // NOTE(review): every worker receives the SAME batch, so each
                    // record is processed threadNumber times — appears deliberate
                    // for this demo; confirm before reusing in production.
                    for ( int i = 0; i < threadNumber; i++ ) {
                        // BUG FIX: the original indexed interfaceNames[i] directly and
                        // threw ArrayIndexOutOfBoundsException whenever threadNumber
                        // (availableProcessors) exceeded the 4 names; wrap with modulo.
                        String handlerName = interfaceNames[ i % interfaceNames.length ];
                        executorService.submit( new RecordsHandlerThread( records, handlerName ) );
                    }
                }
            }
        } catch ( Exception e ) {
            e.printStackTrace();
        } finally {
            consumer.close();
        }
    }
}
// 定义一个 处理消息 的线程
// Worker task that prints every record in one polled batch.
// IDIOM FIX: this class was only ever submitted to an ExecutorService and never
// started as a thread, so it should implement Runnable rather than extend Thread
// (extending Thread allocates thread machinery that is never used).
class RecordsHandlerThread implements Runnable {
    // The batch of records to process (read-only here).
    public final ConsumerRecords<String, String> records;
    // Logical handler name, echoed with each record for tracing.
    public final String threadName;

    public RecordsHandlerThread( ConsumerRecords<String, String> records, String threadName ) {
        this.records = records;
        this.threadName = threadName;
    }

    @Override
    public void run() {
        for ( ConsumerRecord<String, String> record : records ) {
            System.out.println( "收到 MQ 里的消息 " + record.value()
                    + " threadName = " + threadName );
        }
    }
}