1. Producer（生产者）
package com.ucmed.formalWork.demo.kafka;
import org.apache.kafka.clients.producer.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
/**
* Created by ZY-LJ-1446 on 2019/1/8.
*/
public class Producer {
    private static final Logger LOG = LoggerFactory.getLogger(Producer.class);
    private static final String TOPIC = "milo3";
    private static final String BROKER_LIST = "192.168.32.153:9092";
    // Single shared producer; KafkaProducer is thread-safe, created once per JVM.
    private static KafkaProducer<String, String> producer = null;

    /**
     * Initialize the producer when the class is loaded.
     */
    static {
        Properties configs = initConfig();
        producer = new KafkaProducer<>(configs);
    }

    /**
     * Build the producer configuration: broker address plus String serializers
     * for both key and value.
     *
     * @return producer configuration properties
     */
    private static Properties initConfig() {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return properties;
    }

    /**
     * Send five messages ("1".."5" as both key and value) to {@link #TOPIC},
     * logging each send result asynchronously from the callback.
     */
    public static void main(String[] args) throws InterruptedException {
        try {
            for (int i = 0; i < 5; i++) {
                // Message entity: key and value are both the 1-based index.
                ProducerRecord<String, String> record =
                        new ProducerRecord<>(TOPIC, String.valueOf(i + 1), String.valueOf(i + 1));
                // Asynchronous send; the callback fires when the broker acks or the send fails.
                producer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                        if (null != e) {
                            // Log failures at ERROR with the full stack trace,
                            // not just e.getMessage() concatenated at INFO level.
                            LOG.error("send error", e);
                        } else {
                            LOG.info("offset:{},partition:{}", recordMetadata.offset(), recordMetadata.partition());
                        }
                    }
                });
            }
            producer.flush();
        } finally {
            // Always release sockets/buffers, even if flush() throws.
            producer.close();
        }
    }
}
2. Consumer（消费者）
package com.ucmed.formalWork.demo.kafka;
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by ZY-LJ-1446 on 2019/1/8.
*/
public class Consumer {
    private static final Logger LOG = LoggerFactory.getLogger(Consumer.class);
    // Topic actually consumed. The original declared "milo2" here but then
    // subscribed to the hard-coded literal "milo3"; the constant now matches
    // the real subscription and is used below, removing the dead value.
    private static final String TOPIC = "milo3";
    private static final String BROKER_LIST = "192.168.32.153:9092";
    // Single consumer instance; NOTE KafkaConsumer is NOT thread-safe.
    private static KafkaConsumer<String, String> consumer = null;

    /**
     * Initialize the consumer when the class is loaded.
     */
    static {
        Properties configs = initConfig();
        consumer = new KafkaConsumer<>(configs);
    }

    /**
     * Build the consumer configuration, using {@link ConsumerConfig} key
     * constants for consistency with the Producer class instead of raw
     * string literals.
     *
     * @return consumer configuration properties
     */
    private static Properties initConfig() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BROKER_LIST);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "0");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        // Start from the earliest offset when the group has no committed offset.
        properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return properties;
    }

    /**
     * Poll {@link #TOPIC} forever, logging each record's offset, key and value.
     */
    public static void main(String[] args) {
        // Subscribe to the topic; multiple topics may be listed, e.g. "test", "test2".
        consumer.subscribe(Arrays.asList(TOPIC));
        while (true) {
            // NOTE(review): poll(long) is deprecated in kafka-clients >= 2.0;
            // switch to poll(Duration.ofMillis(100)) once the client version allows.
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                LOG.info("offset:{} key:{} value:{}", record.offset(), record.key(), record.value());
            }
        }
    }
}