1,集群配置:
[kafka/config/server.properties]
...
broker.id=202
...
listeners=PLAINTEXT://:9092
...
log.dirs=/home/centos/kafka/logs
...
zookeeper.connect=dataone:2181,datatwo:2181,datathree:2181
分发server.properties,同时修改每个文件的broker.id
2,启动kafka
$>bin/kafka-server-start.sh config/server.properties
3,创建主题
$>bin/kafka-topics.sh --create --zookeeper dataone:2181 --replication-factor 3 --partitions 3 --topic air
4,查看主题列表
$>bin/kafka-topics.sh --list --zookeeper dataone:2181
5, 启动控制台生产者
$>bin/kafka-console-producer.sh --broker-list datatwo:9092 --topic air
6,启动控制台消费者
$>bin/kafka-console-consumer.sh --bootstrap-server datatwo:9092 --topic air --from-beginning
(注意: --bootstrap-server 与 --zookeeper 不能同时使用; 新消费者只需指定 --bootstrap-server)
7,java代码发送消息
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.10.0.1</version>
</dependency>
-----------------------------------------------------------------
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import java.util.Properties;
----------------------------------------------------------------------
// Producer configuration for the old Scala producer API (kafka_2.11 0.10.x).
Properties props = new Properties();
props.put("metadata.broker.list", "datatwo:9092");               // broker bootstrap list
props.put("serializer.class", "kafka.serializer.StringEncoder"); // String value encoder
props.put("request.required.acks", "1");                         // wait for leader ack only
// Build the producer and publish one message to topic "air".
Producer<Integer, String> producer = new Producer<Integer, String>(new ProducerConfig(props));
producer.send(new KeyedMessage<Integer, String>("air", msg));
8,java代码:消费消息
// Consumer configuration for the old high-level (ZooKeeper-based) consumer API.
Properties props = new Properties();
props.put("zookeeper.connect", "dataone:2181");
props.put("group.id", "wangjk");
// NOTE(review): 500 ms is below ZooKeeper's usual minimum session timeout
// (2 * tickTime = 4000 ms by default) -- consider raising this value.
props.put("zookeeper.session.timeout.ms", "500");
props.put("zookeeper.sync.time.ms", "250");
props.put("auto.commit.interval.ms", "1000");
// Start from the earliest available offset when the group has no committed offset.
props.put("auto.offset.reset", "smallest");
// Map of topic -> number of consumer streams (1 stream for topic "air").
// Parameterized types instead of raw Map/HashMap; Integer.valueOf instead of
// the deprecated new Integer(int) constructor.
Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
topicCountMap.put("air", Integer.valueOf(1));
// Create the consumer connector and obtain the message streams for the topic.
Map<String, List<KafkaStream<byte[], byte[]>>> messageStreams =
        Consumer.createJavaConsumerConnector(new ConsumerConfig(props))
                .createMessageStreams(topicCountMap);
List<KafkaStream<byte[], byte[]>> msgList = messageStreams.get("air");
// Iterate each stream; hasNext() blocks until a message arrives.
for (KafkaStream<byte[], byte[]> stream : msgList) {
    ConsumerIterator<byte[], byte[]> it = stream.iterator();
    while (it.hasNext()) {
        byte[] message = it.next().message();
        // Decodes with the platform default charset; specify UTF-8 explicitly
        // if producer and consumer may run on differently-configured hosts.
        System.out.println(new String(message));
    }
}
}