1、API创建主题
1.1、java实现
package KafkaDay02;
import kafka.utils.ZkUtils;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.security.JaasUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
public class _07HomeWork01 {
    /**
     * Creates the topic "test-ex1" (4 partitions, replication factor 3)
     * on the cluster listed in bootstrap.servers, using the AdminClient API,
     * and blocks until the broker confirms the creation.
     */
    public static void main(String[] args) {
        Properties pro = new Properties();
        pro.setProperty("bootstrap.servers", "qianfeng01:9092,qianfeng02:9092,qianfeng03:9092");
        AdminClient adminClient = AdminClient.create(pro);
        try {
            List<NewTopic> topic = new ArrayList<>();
            // 4 partitions, replication factor 3 (the constructor takes a short).
            NewTopic newTopic = new NewTopic("test-ex1", 4, (short) 3);
            topic.add(newTopic);
            CreateTopicsResult topics = adminClient.createTopics(topic);
            // Wait for the broker's acknowledgement; an ExecutionException here
            // surfaces failures such as "topic already exists".
            topics.all().get();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        } finally {
            // The original code leaked the client; always release its
            // network threads and sockets.
            adminClient.close();
        }
    }
}
1.2、scala实现
package com.xxx.Kafka.Day02
import java.util
import java.util.Properties
import org.apache.kafka.clients.admin.{AdminClient, CreateTopicsResult, NewTopic}
object _01HomeWork01 {
  /**
   * Creates the topic "test-ex3" (4 partitions, replication factor 3)
   * via the Kafka AdminClient API and waits for the broker to confirm it.
   */
  def main(args: Array[String]): Unit = {
    val pro = new Properties()
    pro.put("bootstrap.servers", "qianfeng01:9092,qianfeng02:9092,qianfeng03:9092")
    val client: AdminClient = AdminClient.create(pro)
    try {
      // 4 partitions, replication factor 3 (Int literal narrows to the short param).
      val newTopic = new NewTopic("test-ex3", 4, 3)
      val topics = new util.ArrayList[NewTopic]()
      topics.add(newTopic)
      val result: CreateTopicsResult = client.createTopics(topics)
      // The original discarded `result`, so creation failures (e.g. topic
      // already exists) were silently lost. Block until the broker answers.
      result.all().get()
    } finally {
      // Release the client's resources even if topic creation fails.
      client.close()
    }
  }
}
2、API创建生产者
2.1、java实现
package KafkaDay02;
import kafka.Kafka;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.io.IOException;
import java.util.Properties;
public class _08HomeWork2 {
    /**
     * Sends 10000 string messages ("homework0" .. "homework9999") to topic
     * "test-ex1". Producer configuration is loaded from producer.properties
     * on the classpath.
     *
     * @throws IOException if producer.properties cannot be read
     */
    public static void main(String[] args) throws IOException {
        Properties pro = new Properties();
        pro.load(_08HomeWork2.class.getClassLoader().getResourceAsStream("producer.properties"));
        KafkaProducer<Integer, String> producer = new KafkaProducer<>(pro);
        try {
            for (int i = 0; i < 10000; i++) {
                // No key is supplied, so the configured partitioner chooses the partition.
                ProducerRecord<Integer, String> message = new ProducerRecord<>("test-ex1", "homework" + i);
                producer.send(message);
            }
        } finally {
            // close() flushes any buffered records and frees network resources,
            // even if a send above threw (the original leaked the producer then).
            producer.close();
        }
    }
}
2.2、scala实现
package com.xxx.Kafka.Day02
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
object _02HomeWork02 {
  /**
   * Sends 10000 string messages ("hello1" .. "hello10000") to topic
   * "test-ex1". Producer configuration is loaded from producer.properties
   * on the classpath.
   */
  def main(args: Array[String]): Unit = {
    val pro = new Properties()
    pro.load(_02HomeWork02.getClass.getClassLoader.getResourceAsStream("producer.properties"))
    val producer = new KafkaProducer[Int, String](pro)
    try {
      for (i <- 1 to 10000) {
        // Records carry no key; the configured partitioner decides placement.
        val message = new ProducerRecord[Int, String]("test-ex1", "hello" + i)
        producer.send(message)
      }
    } finally {
      // Flush buffered records and release resources even if a send threw
      // (the original leaked the producer on exception).
      producer.close()
    }
  }
}
3、API创建消费者
3.1、java实现
package KafkaDay02;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
public class _09HomeWork03 {
    /**
     * Continuously polls topic "test-ex1" and prints topic / partition /
     * value / offset for every record received. Consumer configuration is
     * loaded from consumer.properties on the classpath.
     *
     * @throws IOException if consumer.properties cannot be read
     */
    public static void main(String[] args) throws IOException {
        Properties pro = new Properties();
        pro.load(_09HomeWork03.class.getClassLoader().getResourceAsStream("consumer.properties"));
        KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(pro);
        List<String> topic = new ArrayList<>();
        topic.add("test-ex1");
        consumer.subscribe(topic);
        // Poll forever; the process is expected to be stopped externally.
        while (true) {
            ConsumerRecords<Integer, String> message = consumer.poll(1000);
            // ConsumerRecords is Iterable, so the enhanced for-loop replaces
            // the manual Iterator bookkeeping of the original.
            for (ConsumerRecord<Integer, String> mess : message) {
                System.out.println("主题" + mess.topic() + " 分区" + mess.partition() + " 值" + mess.value() + " 偏移量" + mess.offset());
            }
        }
    }
}
3.2、scala实现
package com.xxx.Kafka.Day02
import java.util
import java.util.{Collections, Properties}
import org.apache.kafka.clients.consumer.{ConsumerRecord, ConsumerRecords, KafkaConsumer}
object _03HomeWork03 {
  /**
   * Continuously polls topic "test-ex1" and prints topic / partition /
   * value / offset for every record received. Consumer configuration is
   * loaded from consumer.properties on the classpath.
   */
  def main(args: Array[String]): Unit = {
    val pro = new Properties()
    pro.load(_03HomeWork03.getClass.getClassLoader.getResourceAsStream("consumer.properties"))
    val consumer = new KafkaConsumer[Int, String](pro)
    consumer.subscribe(Collections.singletonList("test-ex1"))
    // Poll forever; the process is expected to be stopped externally.
    while (true) {
      val message: ConsumerRecords[Int, String] = consumer.poll(1000)
      val ite: util.Iterator[ConsumerRecord[Int, String]] = message.iterator()
      while (ite.hasNext) {
        val messa: ConsumerRecord[Int, String] = ite.next()
        // Fixed typo in the label: "partiton" -> "partition".
        println(s"topic:${messa.topic()},partition:${messa.partition()},value:${messa.value()},offset:${messa.offset()}")
      }
    }
  }
}
4、自定义分区器
4.1、随机分区器
package KafkaDay02;
import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.PartitionInfo;
import java.util.Map;
public class _03CustomRandomPartitioner implements Partitioner {
    /**
     * Assigns each record to a uniformly random partition of its topic.
     *
     * @return a partition index in [0, partitionCount)
     */
    @Override
    public int partition(String topic, Object key, byte[] keybytes, Object value, byte[] valuebytes, Cluster cluster) {
        int partitionCount = cluster.partitionCountForTopic(topic);
        // ThreadLocalRandom avoids the contention of the single shared Random
        // behind Math.random() when many producer threads partition at once,
        // and nextInt(bound) yields a uniform int directly.
        return java.util.concurrent.ThreadLocalRandom.current().nextInt(partitionCount);
    }

    /** No resources to release. */
    @Override
    public void close() {
    }

    /** No configuration is used. */
    @Override
    public void configure(Map<String, ?> map) {
    }
}
4.2、Hash分区器
package KafkaDay02;
import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;
import java.util.Map;
public class _04CustomHashPartitioner implements Partitioner {
    /**
     * Assigns a record to a partition by hashing its key, so records with
     * equal keys always land on the same partition.
     *
     * @return a partition index in [0, partitionCount)
     */
    @Override
    public int partition(String topic, Object key, byte[] keybytes, Object value, byte[] valuebytes, Cluster cluster) {
        int partitionCount = cluster.partitionCountForTopic(topic);
        // Bug fixes vs. the original:
        //  - a null key used to throw NullPointerException; route it to partition 0's hash
        //  - hashCode() can be negative, and a negative modulo result is an
        //    invalid partition; mask off the sign bit to force non-negative.
        int hash = (key == null) ? 0 : key.hashCode();
        return (hash & 0x7fffffff) % partitionCount;
    }

    /** No resources to release. */
    @Override
    public void close() {
    }

    /** No configuration is used. */
    @Override
    public void configure(Map<String, ?> map) {
    }
}
4.3、轮询分区器
package KafkaDay02;
import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
public class _05CustomPollingPartitioner implements Partitioner {
    // Monotonically increasing counter shared by all producer threads;
    // its value modulo the partition count implements round-robin.
    private final AtomicInteger atomicInteger = new AtomicInteger();

    /**
     * Distributes records across partitions in round-robin order.
     *
     * @return a partition index in [0, partitionCount)
     */
    @Override
    public int partition(String topic, Object key, byte[] keybytes, Object value, byte[] valuebytes, Cluster cluster) {
        int partitionCount = cluster.partitionCountForTopic(topic);
        int andIncrement = atomicInteger.getAndIncrement();
        // Bug fix vs. the original: once the counter overflows Integer.MAX_VALUE
        // it turns negative and the modulo produced an invalid negative partition.
        // Masking off the sign bit keeps the result in range.
        return (andIncrement & 0x7fffffff) % partitionCount;
    }

    /** No resources to release. */
    @Override
    public void close() {
    }

    /** No configuration is used. */
    @Override
    public void configure(Map<String, ?> map) {
    }
}