Kafka Utility Class for Spark Streaming (Scala)

This post walks through a Kafka utility class written in Scala for integrating Spark Streaming with Kafka. The utility loads a configuration file, sets up the Kafka consumer parameters, and exposes three overloads for creating a direct stream, covering different consumption scenarios.

1. Configuration file: config.properties

# Kafka config
kafka.broker.list=hadoop300:9092,hadoop301:9092,hadoop302:9092

# Redis config
redis.host=hadoop300
redis.port=6379

2. Reading the properties file

package com.duoduo.realtime.utils

import java.io.InputStreamReader
import java.util.Properties

/**
 * Author z
 * Date 2020-08-27 10:04:21
 */
object PropertiesUtil {

  def main(args: Array[String]): Unit = {
    val properties: Properties = PropertiesUtil.load("config.properties")
    println(properties.getProperty("kafka.broker.list"))
  }

  // Loads a properties file from the classpath (e.g. src/main/resources).
  def load(propertiesName: String): Properties = {
    val p = new Properties()
    p.load(new InputStreamReader(
      Thread.currentThread().getContextClassLoader
        .getResourceAsStream(propertiesName),
      "UTF-8"))
    p
  }
}
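Because load resolves the file through the context class loader, config.properties must be on the runtime classpath, typically under src/main/resources. As a quick illustration, the same loader can feed the Redis settings from step 1 into a client. This is a minimal sketch and not part of the original utility: the RedisDemo object is made up here, and the Jedis wiring is only an assumption based on the jedis dependency declared in the POM below.

import redis.clients.jedis.Jedis

object RedisDemo {
  def main(args: Array[String]): Unit = {
    val props = PropertiesUtil.load("config.properties")
    // Build a Jedis client from the redis.host / redis.port keys in config.properties
    val jedis = new Jedis(
      props.getProperty("redis.host"),
      props.getProperty("redis.port").toInt)
    println(jedis.ping()) // expect "PONG" if the Redis server is reachable
    jedis.close()
  }
}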


3. POM dependencies

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>dw-stream</artifactId>
        <groupId>com.duoduo</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>dw-realtime</artifactId>

    <properties>
        <spark.version>2.4.0</spark.version>
        <scala.version>2.11.8</scala.version>
        <kafka.version>1.0.0</kafka.version>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.56</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>${kafka.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
            <version>2.9.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.phoenix</groupId>
            <artifactId>phoenix-spark</artifactId>
            <version>4.14.2-HBase-1.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>${spark.version}</version>
        </dependency>
        <dependency>
            <groupId>io.searchbox</groupId>
            <artifactId>jest</artifactId>
            <version>5.3.3</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
        </dependency>
        <dependency>
            <groupId>net.java.dev.jna</groupId>
            <artifactId>jna</artifactId>
            <version>4.5.2</version>
        </dependency>
        <dependency>
            <groupId>org.codehaus.janino</groupId>
            <artifactId>commons-compiler</artifactId>
            <version>2.7.8</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.4.6</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>3.0.0</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>

4. The utility class

package com.duoduo.realtime.utils

import java.util.Properties

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.InputDStream
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, ConsumerStrategy, KafkaUtils, LocationStrategies}

/**
 * Author z
 * Date 2020-08-27 10:02:21
 */
object KafkaUtil {
  private val properties: Properties = PropertiesUtil.load("config.properties")
  val broker_list = properties.getProperty("kafka.broker.list")

  var kafkaParam = collection.mutable.Map(
    // Initial brokers used to bootstrap the connection to the cluster
    "bootstrap.servers" -> broker_list,
    "key.deserializer" -> classOf[StringDeserializer],
    "value.deserializer" -> classOf[StringDeserializer],
    // Identifies which consumer group this consumer belongs to
    "group.id" -> "gmall_consumer_group",
    // Used when there is no initial offset, or the current offset no longer
    // exists on any server; "latest" resets the offset to the most recent one
    "auto.offset.reset" -> "latest",
    // If true, offsets are committed automatically in the background, but data
    // is easily lost if Kafka goes down; if false, offsets must be maintained manually
    "enable.auto.commit" -> (false: java.lang.Boolean)
  )

  // Creates a direct stream with the default consumer group.
  def getKafkaStream(topic: String, ssc: StreamingContext)
  : InputDStream[ConsumerRecord[String, String]] = {
    val dStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils
      .createDirectStream[String, String](
        ssc,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParam)
      )
    dStream
  }

  // Creates a direct stream with an explicit consumer group.
  def getKafkaStream(topic: String, ssc: StreamingContext, groupid: String)
  : InputDStream[ConsumerRecord[String, String]] = {
    kafkaParam("group.id") = groupid
    val dStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils
      .createDirectStream[String, String](
        ssc,
        LocationStrategies.PreferConsistent,
        ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParam)
      )
    dStream
  }

  // Creates a direct stream with an explicit consumer group,
  // resuming from the given per-partition offsets.
  def getKafkaStream(topic: String,
                     ssc: StreamingContext,
                     offsets: Map[TopicPartition, Long],
                     groupid: String)
  : InputDStream[ConsumerRecord[String, String]] = {
    kafkaParam("group.id") = groupid
    val strategy: ConsumerStrategy[String, String] = ConsumerStrategies
      .Subscribe[String, String](Array(topic), kafkaParam, offsets)
    KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      strategy
    )
  }
}
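Since enable.auto.commit is set to false, offsets are not committed automatically; the job has to commit them itself. Below is a minimal usage sketch, not from the original article, that consumes through the second overload and commits offsets after each batch. The topic name gmall_start and group id demo_group are made-up placeholders; HasOffsetRanges and CanCommitOffsets come from the spark-streaming-kafka-0-10 module already declared in the POM.

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges}

object KafkaUtilDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("kafka-util-demo").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Second overload: subscribe with an explicit consumer group.
    val stream = KafkaUtil.getKafkaStream("gmall_start", ssc, "demo_group")

    stream.foreachRDD { rdd =>
      // Read the offset ranges before any transformation breaks the
      // 1:1 mapping between RDD partitions and Kafka partitions.
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      rdd.foreach(record => println(record.value()))
      // enable.auto.commit is false, so commit manually once the batch is processed.
      stream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    }

    ssc.start()
    ssc.awaitTermination()
  }
}

The third overload serves the recovery path: persist the processed offsets somewhere external (Redis fits, given the config above) and pass them back as the offsets: Map[TopicPartition, Long] argument on restart, so consumption resumes exactly where the previous run stopped.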
