一:生产者
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
/**
 * Demo producer for the legacy Kafka 0.8.x Scala-client API
 * (kafka.javaapi.producer): sends 100 string messages to topic "topic".
 */
public class producerTest {
public static void main(String[] args) {
Properties props = new Properties();
// ZooKeeper address. NOTE(review): the 0.8 producer bootstraps via
// metadata.broker.list, not ZooKeeper — this property appears unused here.
props.put("zk.connect", "localhost:2181");
// Message serializer; default is kafka.serializer.DefaultEncoder
props.put("serializer.class", "kafka.serializer.StringEncoder");
// Broker list used to fetch cluster metadata; need not list every broker
props.put("metadata.broker.list", "localhost:9092");
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
System.out.println("开始发送消息--------------------------");
try {
    for (int i = 0; i < 100; i++) {
        // topic: "topic", message: "hehe" + i + "次"
        producer.send(new KeyedMessage<String, String>("topic", "hehe" + i + "次"));
    }
} finally {
    // Always release sockets/threads, even if a send() throws.
    producer.close();
}
System.out.println("发送消息结束--------------------------");
}
}
二:消费者
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
/**
 * Demo high-level consumer for the legacy Kafka 0.8.x Scala-client API
 * (kafka.javaapi.consumer): reads string messages from one topic and
 * prints each message payload to stdout.
 */
public class ConsumerTest {
/** Topic to consume; must match the topic the demo producer writes to. */
private static final String TOPIC = "topic";

private final ConsumerConnector consumer;

private ConsumerTest() {
Properties props = new Properties();
// ZooKeeper quorum used by the high-level consumer for group coordination
props.put("zookeeper.connect", "ip1:2181,ip2:2181,ip3:2181");
// Consumer group id
props.put("group.id", "sf-group");
// ZooKeeper session timeout (default 5000 ms). Used to detect dead
// consumers; other group members wait this long before rebalancing.
props.put("zookeeper.session.timeout.ms", "4000");
// How far a ZooKeeper follower may lag behind the leader
props.put("zookeeper.sync.time.ms", "200");
// How often consumed offsets are committed to ZooKeeper (default 60000 ms).
// Commits are time-based, so a crash between commits can replay messages.
props.put("auto.commit.interval.ms", "1000");
// Where to start when there is no committed offset (or it is out of range):
// "smallest" = earliest, "largest" = latest (default), "anything" = throw.
props.put("auto.offset.reset", "smallest");
// NOTE(review): removed "serializer.class" — it is a producer-side
// property; the consumer's decoding is done by the StringDecoders below.
ConsumerConfig config = new ConsumerConfig(props);
consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
}

void consume() {
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
// One stream (consumer thread) for the topic. BUG FIX: the original
// subscribed to one topic but looked a different one up in the result
// map, so consumerMap.get(...) returned null and .get(0) threw an NPE.
topicCountMap.put(TOPIC, Integer.valueOf(1));
StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
Map<String, List<KafkaStream<String, String>>> consumerMap =
consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
KafkaStream<String, String> stream = consumerMap.get(TOPIC).get(0);
ConsumerIterator<String, String> it = stream.iterator();
// Blocks forever waiting for new messages (hasNext() blocks by default).
while (it.hasNext()) {
System.out.println(it.next().message());
}
}

public static void main(String[] args) {
new ConsumerTest().consume();
}
}