PropertiesUtil 工具类 (utility for loading properties files from the classpath)
package com.douglas.util
import java.io.InputStreamReader
import java.util.Properties
object PropertiesUtil {

  /**
   * Loads a properties file from the classpath, decoding it as UTF-8.
   *
   * @param propertiesName resource name relative to the classpath root, e.g. "config.properties"
   * @return the populated Properties instance
   * @throws IllegalArgumentException if the resource cannot be found on the classpath
   */
  def load(propertiesName: String): Properties = {
    val prop = new Properties()
    val stream = Thread.currentThread().getContextClassLoader.getResourceAsStream(propertiesName)
    // Fail fast with a clear message instead of an opaque NPE when the resource is missing
    require(stream != null, s"Resource not found on classpath: $propertiesName")
    val reader = new InputStreamReader(stream, "UTF-8")
    try {
      prop.load(reader)
    } finally {
      // Closing the reader also closes the underlying stream (was leaked before)
      reader.close()
    }
    prop
  }
}
config.properties
#JDBC配置
jdbc.datasource.size=10
jdbc.url=jdbc:mysql://hadoop102:3306/spark2020?useUnicode=true&characterEncoding=utf8&rewriteBatchedStatements=true
jdbc.user=root
jdbc.password=000000
# Kafka配置
kafka.broker.list=hadoop102:9092,hadoop103:9092,hadoop104:9092
kafka.topic=testTopic
实时数据生成模块 (real-time mock data generation module)
RandomOptions
object RandomOptions {

  /**
   * Builds a weighted random chooser from (value, weight) options.
   *
   * Each value is inserted into the buffer `weight` times, so picking a
   * uniformly random index yields each value with probability weight/totalWeight.
   *
   * @param opts the weighted options; weights are assumed positive
   * @return a populated RandomOptions ready for getRandomOpt
   */
  def apply[T](opts: RanOpt[T]*): RandomOptions[T] = {
    val randomOptions = new RandomOptions[T]()
    for (opt <- opts) {
      randomOptions.totalWeight += opt.weight
      // Expand the value `weight` times so a uniform index pick is weight-proportional
      for (_ <- 1 to opt.weight) {
        randomOptions.optsBuffer += opt.value
      }
    }
    randomOptions
  }

  /** Demo: draws 10 samples from an 8:2 男/女 distribution. */
  def main(args: Array[String]): Unit = {
    // Build the distribution once, instead of rebuilding it on every draw
    // (the original constructed a new RandomOptions inside the loop body)
    val genderOpt = RandomOptions(RanOpt("男", 8), RanOpt("女", 2))
    for (_ <- 1 to 10) {
      println(genderOpt.getRandomOpt)
    }
  }
}
/**
 * Weighted random chooser: stores each option `weight` times in optsBuffer
 * and draws uniformly, so each value is returned with probability
 * weight/totalWeight.
 *
 * NOTE(review): the constructor varargs `opts` are never read — instances are
 * populated by the companion object's apply. Kept for source compatibility.
 */
class RandomOptions[T](opts: RanOpt[T]*) {
  var totalWeight = 0
  var optsBuffer = new ListBuffer[T]
  // One reusable RNG instead of allocating a new Random on every draw
  private val random = new Random()

  /**
   * Returns one option, with probability proportional to its weight.
   *
   * @throws IllegalArgumentException if no options have been registered
   */
  def getRandomOpt: T = {
    // nextInt(0) would throw anyway; require gives a readable message instead
    require(totalWeight > 0, "RandomOptions.getRandomOpt called with no options registered")
    optsBuffer(random.nextInt(totalWeight))
  }
}
MockerRealTime
package com.douglas.macker
import java.util.Properties
import com.douglas.util.{PropertiesUtil, RanOpt, RandomOptions}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
// One mock-event city dimension: numeric id, display name, and region (e.g. 华北/华东/华南)
case class CityInfo(city_id: Long, city_name: String, area: String)
object MockerRealTime {

  /**
   * Generates one batch of mock ad-click lines, each formatted as:
   * "timestamp area city userid adid" (space-separated).
   *
   * @return the batch as an Array of formatted lines
   */
  def generateMockData(): Array[String] = {
    val array: ArrayBuffer[String] = ArrayBuffer[String]()
    // Weighted city distribution: 北京/上海 30 each, 深圳 20, 广州/天津 10 each (total 100)
    val CityRandomOpt = RandomOptions(
      RanOpt(CityInfo(1, "北京", "华北"), 30),
      RanOpt(CityInfo(2, "上海", "华东"), 30),
      RanOpt(CityInfo(3, "广州", "华南"), 10),
      RanOpt(CityInfo(4, "深圳", "华南"), 20),
      RanOpt(CityInfo(5, "天津", "华北"), 10)
    )
    val random = new Random()
    // NOTE(review): `0 to 50` is inclusive and yields 51 records per batch — confirm 50 wasn't intended
    for (_ <- 0 to 50) {
      val timestamp: Long = System.currentTimeMillis()
      val cityInfo: CityInfo = CityRandomOpt.getRandomOpt
      // ad ids and user ids are uniform in [1, 6]
      val adid: Int = 1 + random.nextInt(6)
      val userid: Int = 1 + random.nextInt(6)
      array += s"$timestamp ${cityInfo.area} ${cityInfo.city_name} $userid $adid"
    }
    array.toArray
  }

  /**
   * Continuously produces mock batches to the Kafka topic configured in
   * config.properties, sleeping 2s between batches.
   */
  def main(args: Array[String]): Unit = {
    val config: Properties = PropertiesUtil.load("config.properties")
    val brokers: String = config.getProperty("kafka.broker.list")
    val topic: String = config.getProperty("kafka.topic")
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    val kafkaProducer: KafkaProducer[String, String] = new KafkaProducer[String, String](prop)
    // The loop below never exits normally; flush and release the producer on JVM shutdown
    // (the producer was never closed before)
    sys.addShutdownHook(kafkaProducer.close())
    while (true) {
      for (line <- generateMockData()) {
        kafkaProducer.send(new ProducerRecord[String, String](topic, line))
        println(line)
      }
      Thread.sleep(2000)
    }
  }
}
MyKafkaUtil 工具类 (helper for creating Kafka direct DStreams)
package com.douglas.util
import java.util.Properties
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
object MyKafkaUtil {

  // Broker list is resolved once from config.properties at object initialization.
  private val properties: Properties = PropertiesUtil.load("config.properties")
  private val brokers: String = properties.getProperty("kafka.broker.list")

  /**
   * Creates a direct (receiver-less) Kafka stream subscribed to a single topic.
   *
   * @param topic the Kafka topic to subscribe to
   * @param ssc   the StreamingContext that will own the stream
   * @return an InputDStream of raw ConsumerRecords keyed and valued as Strings
   */
  def getKafkaStream(topic: String, ssc: StreamingContext): InputDStream[ConsumerRecord[String, String]] = {
    // Consumer configuration: String deserialization, fixed consumer group.
    val consumerConfig = Map(
      "bootstrap.servers" -> brokers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "commerce-consumer-group"
    )
    KafkaUtils.createDirectStream[String, String](
      ssc,
      // Let Spark spread partitions evenly across available executors.
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Array(topic), consumerConfig)
    )
  }
}
JDBCUtil 工具类 (JDBC helper backed by a Druid connection pool)
package com.douglas.util
import java.sql.{Connection, PreparedStatement, ResultSet}
import java.util.Properties
import com.alibaba.druid.pool.DruidDataSourceFactory
import javax.sql.DataSource
object JDBCUtil {

  // Druid connection pool, initialized once from config.properties.
  var dataSource: DataSource = init()

  /** Builds the Druid pool from the JDBC settings in config.properties. */
  def init(): DataSource = {
    val properties = new Properties()
    val config: Properties = PropertiesUtil.load("config.properties")
    properties.setProperty("driverClassName", "com.mysql.jdbc.Driver")
    properties.setProperty("url", config.getProperty("jdbc.url"))
    properties.setProperty("username", config.getProperty("jdbc.user"))
    properties.setProperty("password", config.getProperty("jdbc.password"))
    properties.setProperty("maxActive", config.getProperty("jdbc.datasource.size"))
    DruidDataSourceFactory.createDataSource(properties)
  }

  /** Borrows a connection from the pool; the caller is responsible for closing it. */
  def getConnection: Connection = {
    dataSource.getConnection
  }

  /**
   * Executes a single DML statement in its own transaction.
   *
   * @param connection an open connection (not closed by this method)
   * @param sql        the statement with '?' placeholders
   * @param params     positional parameters; may be null or empty
   * @return the update count, or 0 if the statement failed
   */
  def executeUpdate(connection: Connection, sql: String, params: Array[Any]): Int = {
    var rtn = 0
    var pstmt: PreparedStatement = null
    try {
      connection.setAutoCommit(false)
      pstmt = connection.prepareStatement(sql)
      if (params != null && params.length > 0) {
        for (i <- params.indices) {
          pstmt.setObject(i + 1, params(i))
        }
      }
      rtn = pstmt.executeUpdate()
      connection.commit()
    } catch {
      case e: Exception =>
        e.printStackTrace()
        // Undo the failed transaction; previously the aborted work was left pending
        try connection.rollback() catch { case re: Exception => re.printStackTrace() }
    } finally {
      // Was only closed on the success path before — leaked the statement on errors
      if (pstmt != null) {
        try pstmt.close() catch { case e: Exception => e.printStackTrace() }
      }
      // Restore auto-commit so a pooled connection isn't returned in transactional mode
      try connection.setAutoCommit(true) catch { case e: Exception => e.printStackTrace() }
    }
    rtn
  }

  /**
   * Returns true if the query yields at least one row.
   *
   * @param connection an open connection (not closed by this method)
   * @param sql        the query with '?' placeholders
   * @param params     positional parameters (must be non-null)
   */
  def isExist(connection: Connection, sql: String, params: Array[Any]): Boolean = {
    var flag: Boolean = false
    var pstmt: PreparedStatement = null
    try {
      pstmt = connection.prepareStatement(sql)
      for (i <- params.indices) {
        pstmt.setObject(i + 1, params(i))
      }
      flag = pstmt.executeQuery().next()
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // Close in finally so the statement is released even if the query throws
      if (pstmt != null) {
        try pstmt.close() catch { case e: Exception => e.printStackTrace() }
      }
    }
    flag
  }

  /**
   * Runs the query and returns the first column of the LAST row as a Long
   * (0L if there are no rows or the query fails).
   *
   * @param connection an open connection (not closed by this method)
   * @param sql        the query with '?' placeholders
   * @param params     positional parameters (must be non-null)
   */
  def getDataFromMysql(connection: Connection, sql: String, params: Array[Any]): Long = {
    var result: Long = 0L
    var pstmt: PreparedStatement = null
    var resultSet: ResultSet = null
    try {
      pstmt = connection.prepareStatement(sql)
      for (i <- params.indices) {
        pstmt.setObject(i + 1, params(i))
      }
      resultSet = pstmt.executeQuery()
      while (resultSet.next()) {
        result = resultSet.getLong(1)
      }
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      // Close both resources even when the query throws (previously leaked on error)
      if (resultSet != null) {
        try resultSet.close() catch { case e: Exception => e.printStackTrace() }
      }
      if (pstmt != null) {
        try pstmt.close() catch { case e: Exception => e.printStackTrace() }
      }
    }
    result
  }

  /** Smoke test: queries user_ad_count for a single user id and prints the rows. */
  def main(args: Array[String]): Unit = {
    val connection: Connection = getConnection
    val statement: PreparedStatement = connection.prepareStatement("select * from user_ad_count where userid = ?")
    // Was `'a'` (a Scala Char); the userid parameter is a string value
    statement.setObject(1, "a")
    val resultSet: ResultSet = statement.executeQuery()
    while (resultSet.next()) {
      println("111:" + resultSet.getString(1))
    }
    resultSet.close()
    statement.close()
    connection.close()
  }
}