“alt+p”将“kafka_2.11-0.11.0.2.tgz”包拖入
解压包到app下—“tar -zxvf kafka_2.11-0.11.0.2.tgz -C app/”
进入“app/kafka_2.11-0.11.0.2/config/”路径下,修改“server.properties”文件中的“zookeeper.connect”语句为“zjgm01:2181,zjgm02:2181,zjgm03:2181”,如下图
分别将app下的“kafka_2.11-0.11.0.2”文件夹复制到“zjgm02”,“zjgm03”的app下
“scp -r kafka_2.11-0.11.0.2 zjgm02:/home/hadoop/app/”
“scp -r kafka_2.11-0.11.0.2 zjgm03:/home/hadoop/app/”
分别进入“zjgm02”,“zjgm03”的“app/kafka_2.11-0.11.0.2/config”路径下,修改“server.properties”文件中的“broker.id”为“1”,“2”,如下图
分别将“zjgm01”,“zjgm02”,“zjgm03”的zookeeper集群启动—“./zkServer.sh start”
分别进入“zjgm01”,“zjgm02”,“zjgm03”的“app/kafka_2.11-0.11.0.2/”路径下,启动broker—“bin/kafka-server-start.sh config/server.properties”
在kafka集群中创建一个topic—“bin/kafka-topics.sh --create --zookeeper zjgm01:2181 --replication-factor 3 --partitions 1 --topic dsj”
用一个producer向某一个topic中写入消息—“bin/kafka-console-producer.sh --broker-list zjgm01:9092 --topic dsj”
打开idea,新建项目“kafka”,结构如下
“ProducerDemo.java”
package com.zhongruan.kafka;
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
/**
 * Sends ten test messages to the "dsj" topic using the legacy (pre-0.9)
 * Scala producer API ({@code kafka.javaapi.producer.Producer}).
 *
 * <p>Pauses 2 seconds between sends so a console consumer attached to the
 * topic can be observed receiving messages one by one.
 */
public class ProducerDemo {
    public static void main(String[] args) throws InterruptedException {
        Properties props = new Properties();
        // ZooKeeper quorum (used by the legacy client configuration).
        props.put("zk.connect", "zjgm01:2181,zjgm02:2181,zjgm03:2181");
        // Broker list used to fetch topic metadata and route sends.
        props.put("metadata.broker.list", "zjgm01:9092,zjgm02:9092,zjgm03:9092");
        // Encode message payloads as strings.
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        ProducerConfig conf = new ProducerConfig(props);
        Producer<String, String> producer = new Producer<>(conf);
        try {
            for (int i = 0; i < 10; i++) {
                Thread.sleep(2000);
                producer.send(new KeyedMessage<String, String>("dsj", "xyb xue xi" + i + "ci"));
            }
        } finally {
            // The original leaked the producer; close it to release the
            // network connections and flush any buffered sends.
            producer.close();
        }
    }
}
“ConsumerDemo.java”
package com.zhongruan.kafka;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
 * High-level (ZooKeeper-based) consumer demo: subscribes to topic "dsj"
 * with a single stream and prints every message payload to stdout.
 *
 * <p>The legacy high-level consumer's stream iterator blocks waiting for
 * new messages, so this program runs until the process is killed.
 */
public class ConsumerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        // ZooKeeper quorum used by the legacy consumer for group
        // coordination and offset storage.
        props.put("zookeeper.connect", "zjgm01:2181,zjgm02:2181,zjgm03:2181");
        props.put("group.id", "111");
        // When this group has no committed offset, start from the earliest
        // available message ("smallest" is the pre-0.9 spelling of "earliest").
        props.put("auto.offset.reset", "smallest");
        ConsumerConfig config = new ConsumerConfig(props);
        ConsumerConnector consumer = Consumer.createJavaConsumerConnector(config);
        // Request one stream (one consuming thread) for topic "dsj".
        Map<String, Integer> topicMap = new HashMap<>();
        topicMap.put("dsj", 1);
        Map<String, List<KafkaStream<byte[], byte[]>>> streamsMap =
                consumer.createMessageStreams(topicMap);
        List<KafkaStream<byte[], byte[]>> streams = streamsMap.get("dsj");
        // One printer thread per stream. The loop variable is declared final
        // because it is captured by the anonymous Runnable (required before
        // Java 8, and clearer in any version).
        for (final KafkaStream<byte[], byte[]> kafkaStream : streams) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    for (MessageAndMetadata<byte[], byte[]> mm : kafkaStream) {
                        // Payload was written with StringEncoder; decode back
                        // to a String. NOTE(review): uses the platform default
                        // charset, matching the original — confirm UTF-8 hosts.
                        String s = new String(mm.message());
                        System.out.println(s);
                    }
                }
            }).start();
        }
    }
}
idea先运行“ProducerDemo.java”,后运行“ConsumerDemo.java”
用一个consumer从某一个topic中读取信息—“bin/kafka-console-consumer.sh --zookeeper zjgm01:2181 --from-beginning --topic dsj”