Kafka-Consumer(Java & Scala)

本文使用 Java 与 Scala API 来实现 Kafka 消费,配合服务器端生产消息或 Java 代码生产消息进行联合测试。

Kafka-Consumer JAVA & Scala 实现

Maven 依赖

		<dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>0.10.0.1</version>
        </dependency>

Java版

配置类

package kafka;

/**
 * Shared Kafka configuration constants for the Java demo.
 */
public class KafkaPro {
    // ZooKeeper address (host:port); not referenced by the code shown here.
    public final static String ZK = "192.165.1.82:2181";
    // Topic the demo producer writes to and the consumer reads from.
    public final static String TOPIC = "java_kafka";
    // Broker address list, format host:port.
    public final static String BROKER_LIST = "192.165.1.82:9092";

    // Consumer group id shared by the demo consumers.
    public final static String GROUP_ID = "Kafka_API";
}

消费者

package kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;


import java.util.Arrays;
import java.util.Properties;

/**
 * Kafka consumer thread: subscribes to a single topic and prints every
 * received record (offset, key, value) to stdout in an endless poll loop.
 */
public class CustomConsumer extends Thread {

    /** Maximum time in milliseconds a single poll() call may block. */
    private static final long POLL_TIMEOUT_MS = 100L;

    private final String topic;

    /**
     * @param topic name of the Kafka topic to subscribe to
     */
    public CustomConsumer(String topic) {
        this.topic = topic;
    }

    /**
     * Builds a String/String consumer from the shared demo configuration
     * and subscribes it to the given topic.
     *
     * @param topic topic to subscribe to
     * @return a subscribed, ready-to-poll consumer
     */
    private KafkaConsumer<String, String> createConsumer(String topic) {
        Properties props = new Properties();
        // Broker address list (host:port).
        props.put("bootstrap.servers", KafkaPro.BROKER_LIST);
        // Consumer group id.
        props.put("group.id", KafkaPro.GROUP_ID);
        // Commit offsets automatically...
        props.put("enable.auto.commit", "true");
        // ...every 1000 ms (only effective when enable.auto.commit=true).
        props.put("auto.commit.interval.ms", "1000");

        // Deserializers turn the raw bytes received from the broker back
        // into String keys and values.
        props.put("key.deserializer",   "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
        // Subscribe to the single requested topic.
        consumer.subscribe(Arrays.asList(topic));
        return consumer;
    }

    @Override
    public void run() {
        KafkaConsumer<String, String> consumer = createConsumer(topic);
        // Endless poll loop: this thread never terminates on its own.
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT_MS);
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("offset = %d,key = %s ,value = %s%n", record.offset(), record.key(), record.value());
            }
        }
    }
}

测试类

package kafka;
/*
* Kafka Java 测试
* */

public class apptest {
    /** Demo entry point: spawns one consumer thread for the demo topic. */
    public static void main(String[] args) {
        CustomConsumer consumer = new CustomConsumer(KafkaPro.TOPIC);
        consumer.start();
    }
}

服务器端启动消息生产

[root@node1 config]# kafka-console-producer.sh --broker-list localhost:9092 --topic java_kafka
>test

# IDEA 控制台窗口显示如下
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/C:/Users/host/.m2/repository/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/Users/host/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
offset = 39880380,key = null ,value = test

联合测试

package kafka;
/*
* Kafka Java 测试
* */

public class apptest {
    /** Combined demo: start the producer thread, then the consumer thread. */
    public static void main(String[] args) {
        KafkaProduct producer = new KafkaProduct(KafkaPro.TOPIC);
        CustomConsumer consumer = new CustomConsumer(KafkaPro.TOPIC);
        producer.start();
        consumer.start();
    }
}

IDEA窗口显示如下:

SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/C:/Users/host/.m2/repository/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/Users/host/.m2/repository/org/apache/logging/log4j/log4j-slf4j-impl/2.4.1/log4j-slf4j-impl-2.4.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
Send: message1
Send: message2
Send: message3
Send: message4
offset = 39880381,key = null ,value = message1
offset = 39880382,key = null ,value = message2
offset = 39880383,key = null ,value = message3
offset = 39880384,key = null ,value = message4
Send: message5
offset = 39880385,key = null ,value = message5
Send: message6
offset = 39880386,key = null ,value = message6
Send: message7
offset = 39880387,key = null ,value = message7

Scala版

配置类

package com.kafka

/** Shared Kafka configuration constants for the Scala demo. */
object KafkaProperties {
   // ZooKeeper address (host:port); not referenced by the code shown here.
   final val ZK = "192.165.1.82:2181"
   // Topic the Scala demo consumes from.
   final val TOPIC = "scala_kafka2"
   // Broker address list, format host:port.
   final val BROKER_LIST = "192.165.1.82:9092"
   // Consumer group id.
   final val GROUP_ID="Kafka_API"
}

消费者

package com.kafka

import java.util.Properties
import java.util
import java.util.{Arrays, Properties}
import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer}

/**
 * Kafka consumer demo: builds a String/String consumer, subscribes it to a
 * topic, and prints every record in an endless poll loop.
 */
class CustomConsumer {
  /**
   * Creates the consumer, subscribes it to `topic`, and polls forever,
   * printing each record to stdout. This method never returns.
   *
   * @param topic topic to subscribe to; defaults to the shared demo topic
   *              (keeps the original zero-argument call sites working).
   */
  def createConsumer(topic: String = KafkaProperties.TOPIC): Unit = {
    val props = new Properties()
    // Broker address list (host:port).
    props.put("bootstrap.servers", KafkaProperties.BROKER_LIST)
    // Deserializers turn the raw bytes received from the broker back
    // into String keys and values.
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    // Consumer group id.
    props.put("group.id", KafkaProperties.GROUP_ID)

    val consumer: KafkaConsumer[String, String] = new KafkaConsumer[String,String](props)

    consumer.subscribe(util.Arrays.asList(topic))

    // Endless poll loop: each poll blocks up to 100 ms.
    while(true){
      val records: ConsumerRecords[String, String] = consumer.poll(100)
      val it = records.iterator()
      while(it.hasNext){
        println(it.next())
      }
    }
  }
}

测试类

package com.kafka

object apptest {
  /** Demo entry point: runs the consumer, which polls forever. */
  def main(args: Array[String]): Unit =
    new CustomConsumer().createConsumer()
}

联合测试

package com.kafka

object apptest {
  /** Combined demo: produce first, then consume. */
  def main(args: Array[String]): Unit = {
    new CustomProducer().createProducer()

    // createConsumer polls in an infinite loop and never returns,
    // so it must run after the producer.
    new CustomConsumer().createConsumer()
  }
}

  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值