KAFKA Download URL
vim config/server.properties
broker.id=0
advertised.listeners=PLAINTEXT://localhost:9092
log.dirs=/home/kafka/logs
num.partitions=2
zookeeper.connect=localhost:2181
zkServer.sh start
ll /home/kafka/logs/
./bin/kafka-server-start.sh ./config/server.properties &
zkCli.sh
ls /
[cluster, controller, brokers, zookeeper, admin, isr_change_notification, p, controller_epoch, consumers, config]
kafka-server-stop.sh
zkServer.sh stop
Apache Kafka Getting Started - Basic configuration and running of Kafka-manager
unzip kafka-manager-1.3.0.4.zip
vim application.conf
kafka-manager.zkhosts="localhost:2181"
nohup bin/kafka-manager -Dconfig.file=conf/application.conf &
http://localhost:9000/
kafka-topics.sh --zookeeper localhost:2181 --list
kafka-console-producer.sh --broker-list 59.110.138.145:9092 --topic test
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>cn.itcast.kafka</groupId>
<artifactId>kafka</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>war</packaging>
<properties>
<!-- Single place to manage the JUnit version; referenced below via ${junit.version}. -->
<junit.version>4.12</junit.version>
</properties>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<!-- Was hardcoded to 4.9, silently ignoring the 4.12 declared in <properties>. -->
<version>${junit.version}</version>
</dependency>
<dependency>
<!-- Old Scala-2.10 Kafka client; provides the kafka.javaapi.* classes used below. -->
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.10</artifactId>
<version>0.8.2.1</version>
</dependency>
</dependencies>
</project>
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringEncoder;
public class KafkaProducer {

    /** Topic the example messages are published to. */
    public static final String topic = "test";

    /**
     * Sends ten "hello kafka" messages to {@link #topic}, one per second,
     * using the legacy (0.8.x) kafka.javaapi producer, then closes the producer.
     */
    public static void main(String[] args) throws InterruptedException {
        Properties config = new Properties();
        config.put("zookeeper.connect", "localhost:2181");
        // Serialize message payloads as strings.
        config.put("serializer.class", StringEncoder.class.getName());
        // Broker used for initial metadata discovery.
        config.put("metadata.broker.list", "localhost:9092");
        // Require an ack from the partition leader before considering a send complete.
        config.put("request.required.acks", "1");

        Producer<Integer, String> producer = new Producer<Integer, String>(new ProducerConfig(config));
        for (int seq = 0; seq < 10; seq++) {
            String payload = "hello kafka" + seq;
            producer.send(new KeyedMessage<Integer, String>(topic, payload));
            System.out.println("send message: " + payload);
            // Pace the sends at one message per second.
            Thread.sleep(1000);
        }
        producer.close();
    }
}
kafka-console-consumer.sh --zookeeper localhost:2181 --topic test --from-beginning
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
public class kafkaConsumer {

    /** Topic consumed by this example; must match the producer's topic. */
    public static final String topic = "test";

    /**
     * Consumes messages from {@link #topic} via the legacy (0.8.x) high-level
     * consumer and prints each message; blocks forever waiting for new messages.
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        // ZooKeeper ensemble used for broker discovery and offset storage.
        props.put("zookeeper.connect", "localhost:2181");
        // Consumer group id; consumers in the same group share the topic's partitions.
        props.put("group.id", "group1");
        // ZooKeeper session timeout and follower sync tolerance.
        props.put("zookeeper.session.timeout.ms", "4000");
        props.put("zookeeper.sync.time.ms", "200");
        // Auto-commit consumed offsets every second.
        props.put("auto.commit.interval.ms", "1000");
        // With no committed offset, start from the earliest available message.
        props.put("auto.offset.reset", "smallest");
        // NOTE(review): serializer.class is a PRODUCER property and is ignored by
        // ConsumerConfig; kept only for parity with the original example.
        props.put("serializer.class", "kafka.serializer.StringEncoder");

        ConsumerConfig conf = new ConsumerConfig(props);
        ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(conf);

        // Request a single stream (one consumer thread) for the topic.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, Integer.valueOf(1)); // new Integer(int) is deprecated

        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<String, String> it = stream.iterator();
        // hasNext() blocks until a message arrives, so this loop never exits on its own.
        while (it.hasNext()) {
            System.out.println(it.next().message());
        }
    }
}