kafka_03_topic_dml_api

kafka topic DML

package com.soul.kafka.level03;

import org.apache.kafka.clients.admin.*;
import org.apache.kafka.common.KafkaFuture;

import java.util.*;
import java.util.concurrent.ExecutionException;

public class _03TopicDML {

    /**
     * Demonstrates Kafka topic admin (DML) operations: delete, create, list, describe.
     *
     * Requires hostname-to-IP mappings for the brokers in the OS hosts file,
     * e.g. C:\Windows\System32\drivers\etc\hosts:
     * <pre>
     * #kafka node begin
     * 192.168.25.106 kafka01
     * 192.168.25.107 kafka02
     * 192.168.25.108 kafka03
     * #kafka node end
     * </pre>
     */
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // Connection parameters.
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
                "kafka01:9092,kafka02:9092,kafka03:9092");

        // Program to the AdminClient interface — the cast to the concrete
        // KafkaAdminClient class was unnecessary. try-with-resources closes
        // the client even when one of the operations throws.
        try (AdminClient adminClient = AdminClient.create(props)) {
            // Delete. delete() now blocks on the result future, replacing the
            // old Thread.sleep(2000) race with a deterministic wait.
            delete(adminClient, "topic02");

            // Create (also waits for broker acknowledgement).
            create(adminClient, "topic02");

            // List topic names.
            query(adminClient);

            // Describe topic details.
            queryDetail(adminClient, "topic02");
        }
    }

    /** Prints every topic name visible to this client. */
    private static void query(AdminClient adminClient) throws InterruptedException, ExecutionException {
        KafkaFuture<Set<String>> nameFutures = adminClient.listTopics().names();
        System.out.println("-------- query begin --------");
        for (String name : nameFutures.get()) {
            System.out.println(name);
        }
        System.out.println("-------- query end --------");
    }

    /** Prints partition/replica details for each given topic. */
    private static void queryDetail(AdminClient adminClient, String... topicName) throws InterruptedException,
            ExecutionException {
        DescribeTopicsResult describeTopics =
                adminClient.describeTopics(Arrays.asList(topicName));
        Map<String, TopicDescription> tdm = describeTopics.all().get();
        System.out.println("-------- query detail begin --------");
        for (Map.Entry<String, TopicDescription> entry : tdm.entrySet()) {
            System.out.println(entry.getKey() + "\t" + entry.getValue());
        }
        System.out.println("-------- query detail end --------");
    }

    /**
     * Best-effort topic deletion: blocks until the broker has acknowledged the
     * request, but reports failures (e.g. topic does not exist on a fresh
     * cluster) instead of propagating them — matching the original
     * fire-and-forget intent while removing the need for an arbitrary sleep.
     */
    private static void delete(AdminClient adminClient, String... topicNameList) throws InterruptedException {
        try {
            adminClient.deleteTopics(Arrays.asList(topicNameList)).all().get();
        } catch (ExecutionException e) {
            // Keep going: the demo continues with create/query regardless.
            System.err.println("delete failed: " + e.getCause());
        }
    }

    /**
     * Creates a topic with 2 partitions and replication factor 3.
     * createTopics() is asynchronous; all().get() blocks until the broker
     * confirms, so the topic is usable once this method returns. Failures
     * (e.g. topic already exists because the earlier delete has not fully
     * propagated) are reported, not swallowed via printStackTrace.
     */
    private static void create(AdminClient adminClient, String topicName) throws InterruptedException {
        // topic name, partition count, replication factor
        List<NewTopic> newTopics = Arrays.asList(new NewTopic(topicName, 2, (short) 3));
        try {
            adminClient.createTopics(newTopics).all().get();
        } catch (ExecutionException e) {
            System.err.println("create failed: " + e.getCause());
        }
    }
}

kafka consumer

package com.soul.kafka.level03;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Iterator;
import java.util.Properties;
import java.util.regex.Pattern;

//allow parallel run
public class _04KafkaConsumerDML {
    // Start this consumer first, then the producer.
    public static void main(String[] args) {
        // Connection, key/value deserialization, and consumer-group settings.
        Properties config = new Properties();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.put(ConsumerConfig.GROUP_ID_CONFIG, "group01");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(config);

        // Subscribe by regex to every topic whose name starts with "topic".
        // (A single exact topic would be Pattern.compile("topic02").)
        consumer.subscribe(Pattern.compile("^topic.*$"));

        // Kafka only guarantees ordering per producer within a partition,
        // not across consumption.
        while (true) {
            // Poll with a 1-second timeout.
            ConsumerRecords<String, String> batch = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> msg : batch) {
                System.out.println("kafkaKey:" + msg.key() + ", kafkaVal:" + msg.value()
                        + ", partition:" + msg.partition() + ", offset:" + msg.offset());
            }
        }
    }
}

kafka producer

package com.soul.kafka.level03;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class _05KafkaProducerDML {
    // Start the consumer first, then this producer.
    public static void main(String[] args) throws InterruptedException {
        // 1. Connection parameters.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");
        // Key serializer.
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Value serializer.
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // 2. Create the producer. try-with-resources closes it (and flushes any
        //    buffered records) even if send() or sleep() throws — the original
        //    leaked the producer on any failure inside the loop.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // 3. Send 30 records, ~100 ms apart.
            for (int i = 0; i < 30; i++) {
                Thread.sleep(100);
                // NOTE: the target topic must already exist.
                ProducerRecord<String, String> record = new ProducerRecord<>("topic02", "K" + i, "V" + i);
                producer.send(record);
            }
        }
    }
}

kafka consumer partition

package com.soul.kafka.level03;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.regex.Pattern;

public class _06KafkaConsumerPartition {
    // Start this consumer first, then the producer.
    public static void main(String[] args) {
        // Connection and key/value deserialization settings. No group.id is
        // configured: with manual partition assignment the consumer operates
        // outside consumer-group management.
        Properties config = new Properties();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka01:9092,kafka02:9092,kafka03:9092");
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(config);

        // Manually assign partition 0 of topic02 and rewind to the earliest
        // offset; only records from that one partition will be delivered.
        List<TopicPartition> assigned = Arrays.asList(new TopicPartition("topic02", 0));
        consumer.assign(assigned);
        consumer.seekToBeginning(assigned);

        // Kafka only guarantees ordering per producer within a partition,
        // not across consumption.
        while (true) {
            // Poll with a 1-second timeout.
            ConsumerRecords<String, String> batch = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> msg : batch) {
                System.out.println("kfkKey:" + msg.key() + ", kfkVal:" + msg.value()
                        + ", partition:" + msg.partition() + ", offset:" + msg.offset());
            }
        }
    }
}

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.soul</groupId>
    <artifactId>kafka</artifactId>
    <version>0.0.1</version>
    <name>kafka</name>

    <properties>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <!-- kafka begin -->

        <!-- Kafka client API: AdminClient, KafkaProducer, KafkaConsumer. -->
        <!--https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients-->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>2.2.0</version>
        </dependency>

        <!-- Logging: SLF4J API bound to log4j 1.2 (kafka-clients logs via SLF4J). -->
        <!-- https://mvnrepository.com/artifact/log4j/log4j -->
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.25</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.9</version>
        </dependency>

        <!-- kafka end -->
        
    </dependencies>

    <build>
        <plugins>
            <!-- Compile for Java 8 source/target. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>8</source>
                    <target>8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>

log4j.properties

# Root logger: INFO level, routed to the "console" appender.
log4j.rootLogger = info,console

# Console appender writing to stdout with a timestamped pattern layout:
# level, date, logger name, message.
log4j.appender.console = org.apache.log4j.ConsoleAppender
log4j.appender.console.Target = System.out
log4j.appender.console.layout = org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern =  %p %d{yyyy-MM-dd HH:mm:ss} %c - %m%n

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值