一、下载虚拟机,安装linux系统,这里我用的是CentOS
虚拟机自行百度
二、给linux安装java环境
1.去下载linux专用的JDK包,然后解压配置。
tar zxvf jdk-8u144-linux-x64.tar.gz
2.编辑jdk环境变量
执行 vi /etc/profile
编辑系统配置文件 配置java环境变量
export JAVA_HOME=/usr/local/jdk1.8.0_144
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$JAVA_HOME/bin:$PATH
保存 Esc+:+wq
3.重新加载系统配置文件
source /etc/profile
4.检查java是否安装成功
java -version
安装成功!
三、安装kafka环境
1.配置linux下的kafka环境,下载kafka
2.在linux中解压kafka包
tar -xzf kafka_2.11-2.0.0.tgz
3.由于新版的kafka包自带zookeeper所以不必再下载zookeeper,直接启动kafka里的 zookeeper。
bin/zookeeper-server-start.sh config/zookeeper.properties
4.启动kafka
bin/kafka-server-start.sh config/server.properties
5.创建topic消息
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic test
6.创建消费者
bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test --from-beginning
7.创建生产者
bin/kafka-console-producer.sh --broker-list localhost:9092 --topic test
四、java代码
生产者类 KafkaProducter
public class KafkaProducter {
    private final KafkaProducer<String, String> producer;
    public final static String TOPIC = "test";

    private KafkaProducter() {
        Properties properties = new Properties();
        // Broker address (ip:port); comma-separate entries for a cluster.
        properties.put("bootstrap.servers", "192.168.0.108:9092");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // FIX: the Java client's property name is "acks". The original used
        // "request.required.acks", the legacy scala-producer name, which the
        // new KafkaProducer ignores (it only logs an unknown-config warning).
        // "-1" (== "all") waits for the full in-sync replica set to ack.
        properties.put("acks", "-1");
        producer = new KafkaProducer<>(properties);
    }

    /**
     * Sends 100 records (keys "100".."199") to {@link #TOPIC} asynchronously,
     * printing each payload, then closes the producer.
     */
    void produce() {
        // Send 100 messages, numbered 100..199.
        int messageNo = 100;
        int count = 200;
        while (messageNo < count) {
            String key = String.valueOf(messageNo);
            String data = "我终于成功了" + key;
            long startTime = System.currentTimeMillis();
            ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC, key, data);
            // DataCallback (declared elsewhere in this project) receives the
            // send result; presumably it logs latency from startTime — TODO confirm.
            producer.send(record, new DataCallback(startTime, data));
            System.out.println(data);
            messageNo++;
        }
        // FIX: close() flushes buffered records and releases resources;
        // without it, send() is asynchronous and the JVM may exit before
        // the records are actually delivered.
        producer.close();
    }

    public static void main(String[] args) {
        new KafkaProducter().produce();
    }
}
消费者类 KafkaConsumer
public class KafkaConsumer{
private final Consumer<String, String> consumer;
private KafkaConsumer() {
Properties props = new Properties();
props.put("bootstrap.servers", "192.168.0.108:9092");//服务器ip:端口号,集群用逗号分隔
props.put("group.id", "test-consumer-group");
props.put("enable.auto.commit", "true");
props.put("auto.commit.interval.ms", "1000");
props.put("session.timeout.ms", "30000");
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
consumer = new KafkaConsumer<>(props);
consumer.subscribe(Arrays.asList("test"));
}
void consume() {
while(true){
ConsumerRecords<String, String> records = consumer.poll(100);
if (records.count() > 0) {
for (ConsumerRecord<String, String> record : records) {
String message = record.value();
System.out.println("从kafka接收到的消息是:" + message);
}
}
}
}
public static void main(String[] args) {
new KafkaConsumer().consume();
}
}
`