1. Create a topic with the Kafka AdminClient (Java and Scala versions)
Scala version
import java.util
import java.util.Properties
import org.apache.kafka.clients.admin.{AdminClient, NewTopic}

object TopicDemo extends App {
  private val properties = new Properties()
  // the AdminClient only needs the broker addresses (deserializer settings belong to consumers, not here)
  properties.setProperty("bootstrap.servers", "mypc01:9092,mypc02:9092,mypc03:9092")
  private val adminClient: AdminClient = AdminClient.create(properties)
  // create a topic named "test" with 2 partitions and a replication factor of 3
  // (the replication factor parameter is a short, so the Int literal must be converted)
  private val testTopic = new NewTopic("test", 2, 3.toShort)
  // build the list of topics to create
  private val list = new util.ArrayList[NewTopic]()
  list.add(testTopic)
  // submit the request and block until the topic has been created
  adminClient.createTopics(list).all().get()
  adminClient.close()
}
Java version
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

public class TopicDemo1 {
    public static void main(String[] args) throws Exception {
        Properties properties = new Properties();
        // the AdminClient only needs the broker addresses
        properties.setProperty("bootstrap.servers", "mypc01:9092,mypc02:9092,mypc03:9092");
        AdminClient adminClient = AdminClient.create(properties);
        // create a topic named "test1" with 2 partitions and a replication factor of 3
        NewTopic test1 = new NewTopic("test1", 2, (short) 3);
        List<NewTopic> newTopics = new ArrayList<>();
        newTopics.add(test1);
        // submit the request and block until the topic has been created
        adminClient.createTopics(newTopics).all().get();
        adminClient.close();
    }
}
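Since createTopics only submits an asynchronous request, it can be worth confirming afterwards that the topic really exists. A minimal Scala sketch (an illustrative addition, reusing the adminClient from the Scala example above):

// list the topic names on the cluster and check that the new topic shows up
val topicNames: java.util.Set[String] = adminClient.listTopics().names().get()
println(s"topics on the cluster: $topicNames")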
2. Producer: write 10,000 records into the test-ex1 topic in a loop (Java and Scala versions)
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

object ProduceHomework extends App {
  private val prop = new Properties()
  // load the producer configuration file from the classpath
  prop.load(ProduceHomework.getClass.getClassLoader.getResourceAsStream("producer.properties"))
  // build the producer
  private val producer = new KafkaProducer[Integer, String](prop)
  // build and send 10,000 records
  for (x <- 1 to 10000) {
    val message: ProducerRecord[Integer, String] = new ProducerRecord("test-ex1", x, "test" + x)
    // send the record (asynchronously)
    producer.send(message)
  }
  // flush buffered records and release the producer's resources
  producer.close()
}
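The producer.properties file is not shown here; since the producer is typed KafkaProducer[Integer, String], it needs at least bootstrap.servers plus key.serializer=org.apache.kafka.common.serialization.IntegerSerializer and value.serializer=org.apache.kafka.common.serialization.StringSerializer. Also, producer.send is asynchronous, so a callback is a handy way to see whether each record was actually written. A short Scala sketch (illustrative only, reusing the producer from the example above):

import org.apache.kafka.clients.producer.{Callback, ProducerRecord, RecordMetadata}

// inside the loop, the plain producer.send(message) could be replaced with a send that reports the result
producer.send(new ProducerRecord[Integer, String]("test-ex1", Integer.valueOf(1), "test1"), new Callback {
  override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit = {
    if (exception != null) exception.printStackTrace() // the send failed
    else println(s"wrote to partition ${metadata.partition()} at offset ${metadata.offset()}")
  }
})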
3. Consumer: read the data from the test-ex1 topic (Java and Scala versions)
import java.util
import java.util.Properties
import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}

object ConsumerHomework extends App {
  val properties = new Properties()
  // load the consumer configuration file from the classpath
  properties.load(ConsumerHomework.getClass.getClassLoader.getResourceAsStream("consumer.properties"))
  val consumer = new KafkaConsumer[Integer, String](properties)
  // subscribe to the topic written by the producer above
  consumer.subscribe(util.Arrays.asList("test-ex1"))
  import scala.collection.JavaConversions._
  while (true) {
    // poll must sit inside the loop: one call returns at most max.poll.records
    // records (500 by default), so a single poll cannot drain the whole topic
    val records: ConsumerRecords[Integer, String] = consumer.poll(1000)
    for (record <- records) {
      val key: Integer = record.key()
      val value: String = record.value()
      val partition: Int = record.partition()
      val offset: Long = record.offset()
      val topic: String = record.topic()
      println(s"topic: $topic key: $key value: $value partition: $partition offset: $offset")
    }
  }
}
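The consumer.properties file is not shown either; because the consumer is typed KafkaConsumer[Integer, String], it must at least configure the broker list, a group id, and matching deserializers. A minimal Scala sketch of the same consumer built with in-code settings (the group id is a made-up example):

import java.util.Properties
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}

val props = new Properties()
props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "mypc01:9092,mypc02:9092,mypc03:9092")
props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test-ex1-group") // hypothetical group id
props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerDeserializer")
props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
val consumer = new KafkaConsumer[Integer, String](props)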
Summary
- Note that poll has to be called inside a loop; a single call pulls at most 500 records by default, so the 10,000 records produced above could not all be fetched with one poll (the snippet below shows how to raise that limit).
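The 500-record cap comes from the consumer's max.poll.records setting, which can be raised if larger batches per poll are wanted, for example (value chosen arbitrarily):

// in the consumer configuration, before building the KafkaConsumer
properties.setProperty(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "1000") // default is 500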