软件版本
- jdk-8u91-linux-x64.tar.gz
- kafka_2.11-1.0.0.tgz
- zookeeper-3.4.10.tar.gz
环境配置
- Centos 7 +VirtualBox + JDK安装参见:https://mp.csdn.net/postedit/78737523
- Kafka单机版安装参见:https://mp.csdn.net/postedit/78895893
- zookeeper-3.4.10.tar.gz安装参见:https://mp.csdn.net/mdeditor/78894715
运行zookeeper
- 切换root账户:su root
- 切换到zookeeper安装目录下,执行:./bin/zkServer.sh start
运行kafka
- 切换到kafka安装目录下,执行:nohup ./bin/kafka-server-start.sh config/server.properties &
运行Producer、Consumer测试kafka
- 运行Producer:bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
- 运行Consumer:bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
编写Producer程序
1. 第一个简单程序(https://www.cnblogs.com/gaopeng527/p/4959633.html)
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
public class KafkaWordCountProducer {
    /**
     * Demo entry point: publishes a fixed pair of messages — "test kafka" to
     * topic "top1" and "hello world" to topic "top2" — once per second,
     * 99 times in total, then closes the producer.
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("metadata.broker.list", "localhost:9092");
        props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
        props.put("request.required.acks", "1");
        ProducerConfig config = new ProducerConfig(props);
        // Create the producer instance.
        Producer<String, String> producer = new Producer<String, String>(config);
        // Build both messages once; the same objects are re-sent each iteration.
        KeyedMessage<String, String> data1 = new KeyedMessage<String, String>("top1", "test kafka");
        KeyedMessage<String, String> data2 = new KeyedMessage<String, String>("top2", "hello world");
        try {
            for (int i = 1; i < 100; i++) {
                // Send both messages, then pause for one second.
                producer.send(data1);
                producer.send(data2);
                Thread.sleep(1000);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        producer.close();
    }
}
pom.xml 添加如下:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>1.0.0</version> <!-- 与安装的 kafka_2.11-1.0.0 保持一致 -->
</dependency>
打包,直接运行java -cp xxx.jar xxx.xxx.xxx(需要运行的类),如果没有报错,在命令行运行kafka Consumer,比如:
bin/kafka-console-consumer.sh --zookeeper localhost:2181 --topic top1 --from-beginning
输出99次:test kafka(循环从 i=1 发送到 i=99)
2. 第二个例子:直接从命令行读取消息
import java.util.Properties;
import java.util.Scanner;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
public class KafkaWordCountProducerTop {
    /**
     * Reads lines from stdin and publishes each one to the topic named by
     * {@code args[0]}, prefixed with a running line number. Typing "end"
     * (case-insensitive) stops the loop.
     */
    public static void main(String[] args) {
        // Fix: the original dereferenced args[0] without checking, which throws
        // ArrayIndexOutOfBoundsException when no topic argument is supplied.
        if (args.length < 1) {
            System.err.println("Usage: KafkaWordCountProducerTop <topic>");
            return;
        }
        String topic = args[0];
        Properties props = new Properties();
        props.setProperty("metadata.broker.list", "localhost:9092");
        props.setProperty("serializer.class", "kafka.serializer.StringEncoder");
        props.put("request.required.acks", "1");
        ProducerConfig config = new ProducerConfig(props);
        // Create the producer instance.
        Producer<String, String> producer = new Producer<String, String>(config);
        try {
            Scanner scan = new Scanner(System.in);
            int i = 1;
            while (scan.hasNextLine()) {
                String inputstr = scan.nextLine();
                // "end" terminates the input loop.
                if (inputstr.equalsIgnoreCase("end")) {
                    break;
                }
                // Build and send one message per input line: "<n> <line>".
                KeyedMessage<String, String> data1 =
                        new KeyedMessage<String, String>(topic, i++ + " " + inputstr);
                producer.send(data1);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: close in finally so network resources are always released.
            producer.close();
        }
    }
}
编写Consumer程序
import java.util.Arrays;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
public class KafkaConsumerlearn {
    /**
     * Minimal console consumer: subscribes to topic "top1" and prints each
     * record's offset, key and value in an endless poll loop.
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.56.109:9092"); // host:port list for the initial connection to the cluster
        props.put("group.id", "test"); // consumer group id
        props.put("enable.auto.commit", "true"); // true: offsets are committed periodically
        props.put("auto.commit.interval.ms", "1000"); // offset commit interval, in milliseconds
        props.put("session.timeout.ms", "30000"); // heartbeat/session timeout
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); // deserializers
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Fix: the original used the raw type KafkaConsumer (unchecked warning,
        // records lost their String/String typing); try-with-resources also
        // guarantees consumer.close() if poll() or subscribe() ever throws.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props)) {
            consumer.subscribe(Arrays.asList("top1"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s\n",
                            record.offset(), record.key(), record.value());
                }
            }
        }
    }
}
(https://blog.csdn.net/m0_37739193/article/details/78396773)
测试:启动producer
在eclipse中直接执行(本人用eclipse luna+maven进行编码的)
以上就是kafka简单的使用。