生产者(Producer):
import com.google.common.collect.Lists;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.junit.Test;
import java.util.List;
import java.util.Properties;
import static org.apache.kafka.clients.producer.ProducerConfig.*;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
/**
* kafka 发数据
*/
/**
 * Kafka producer smoke test: synchronously sends a few string messages to a topic.
 */
public class ProducerTest {
    @Test
    public void producer() {
        Properties conf = new Properties();
        conf.setProperty(BOOTSTRAP_SERVERS_CONFIG, "192.168.1.191:9092");
        // "all": wait for the full in-sync replica set to acknowledge each record.
        conf.put(ACKS_CONFIG, "all");
        conf.put(KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        conf.put(VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");

        String toTopic = "test_lxk";
        List<String> messages = Lists.newArrayList(
                "message1",
                "message2",
                "message3",
                "message4"
        );
        String key = null;

        // try-with-resources: close() flushes any buffered records before the test
        // JVM exits; without it, async-sent records may silently be lost.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(conf)) {
            for (String value : messages) {
                ProducerRecord<String, String> record = new ProducerRecord<>(toTopic, key, value);
                try {
                    // send() is asynchronous; get() blocks until the broker acks,
                    // so a plain send() without get() may not be delivered (and
                    // thus never consumed) before this short-lived test ends.
                    producer.send(record).get();
                } catch (Exception e) {
                    System.out.println(e.toString());
                }
            }
        }
    }
}
消费者(Consumer):
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.junit.Test;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import static org.apache.kafka.clients.consumer.ConsumerConfig.*;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
/**
* 消费数据
*/
public class ConsumerTest {
@Test
@SuppressWarnings("unchecked")
public void test() {
KafkaConsumerconsumer = new KafkaConsumer<>(conf());
consumer.subscribe(Lists.newArrayList("business_lxk_12", "business_lxk_34"));
try {
while (true){
ConsumerRecordsrecords = consumer.poll(100);
for (ConsumerRecordrecord : records) {
String key = record.key();
int partition = record.partition();
String topic = record.topic();
long offset = record.offset();
String value = record.value();
Map map = JsonUtils.parseJsonToObj(value, Map.class);
if (map != null) {
TreeMap treeMap = Maps.newTreeMap();
map.forEach(treeMap::put);
System.out.println(topic + " " +JsonUtils.parseObjToJson(treeMap));
}
}
}
} catch (Exception e){
System.out.println(e);
} finally {
consumer.close();
}
}
private static Properties conf() {
Properties conf = new Properties();
conf.setProperty(BOOTSTRAP_SERVERS_CONFIG, "192.168.1.191:9092");
conf.setProperty(GROUP_ID_CONFIG, "lxk");
conf.setProperty(ENABLE_AUTO_COMMIT_CONFIG, "true");
conf.setProperty(AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
conf.setProperty(AUTO_OFFSET_RESET_CONFIG, "latest");
conf.setProperty(KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
conf.setProperty(VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
return conf;
}
}