Utility classes + configuration file

PropertiesUtil utility class

package com.douglas.util

import java.io.InputStreamReader
import java.util.Properties

object PropertiesUtil {

    def load(propertiesName: String): Properties = {

        val prop = new Properties()
        prop.load(new InputStreamReader(Thread.currentThread().getContextClassLoader.getResourceAsStream(propertiesName), "UTF-8"))
        prop
    }
}
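
PropertiesUtil loads a properties file from the classpath (e.g. src/main/resources) using UTF-8, so non-ASCII values are read correctly. A minimal usage sketch, assuming the config.properties shown next is on the classpath:

val config = PropertiesUtil.load("config.properties")
val brokers = config.getProperty("kafka.broker.list")
println(brokers)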

config.properties

# JDBC configuration
jdbc.datasource.size=10
jdbc.url=jdbc:mysql://hadoop102:3306/spark2020?useUnicode=true&characterEncoding=utf8&rewriteBatchedStatements=true
jdbc.user=root
jdbc.password=000000

# Kafka configuration
kafka.broker.list=hadoop102:9092,hadoop103:9092,hadoop104:9092
kafka.topic=testTopic

Real-time data generation module

RandomOptions

package com.douglas.util

import scala.collection.mutable.ListBuffer
import scala.util.Random

object RandomOptions {

    def apply[T](opts: RanOpt[T]*): RandomOptions[T] = {

        val randomOptions = new RandomOptions[T]()

        for (opt <- opts) {
            // Accumulate the total weight, e.g. 8 + 2
            randomOptions.totalWeight += opt.weight

            // Add the value to the buffer once per unit of its weight; the higher the weight, the more copies are stored
            for (i <- 1 to opt.weight) {
                // e.g. 男 男 男 男 男 男 男 男 女 女
                randomOptions.optsBuffer += opt.value
            }
        }

        randomOptions
    }

    def main(args: Array[String]): Unit = {

        for (i <- 1 to 10) {
            println(RandomOptions(RanOpt("男", 8), RanOpt("女", 2)).getRandomOpt)
        }
    }
}

class RandomOptions[T](opts: RanOpt[T]*) {

    var totalWeight = 0
    var optsBuffer = new ListBuffer[T]

    def getRandomOpt: T = {
        // Pick a random index in [0, totalWeight), e.g. 0-9
        val randomNum: Int = new Random().nextInt(totalWeight)
        // Use the random index to pick an element from the buffer
        optsBuffer(randomNum)
    }
}
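
Both RandomOptions and the mock module below import RanOpt from com.douglas.util, but its definition does not appear in this post. A minimal sketch consistent with how it is used here (value is the candidate element, weight its relative sampling weight):

package com.douglas.util

// Assumed definition: a weighted candidate consumed by RandomOptions
case class RanOpt[T](value: T, weight: Int)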

MockerRealTime

package com.douglas.mocker

import java.util.Properties
import com.douglas.util.{PropertiesUtil, RanOpt, RandomOptions}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}

import scala.collection.mutable.ArrayBuffer
import scala.util.Random

// City info: city_id = city ID, city_name = city name, area = region the city belongs to
case class CityInfo(city_id: Long, city_name: String, area: String)

object MockerRealTime {

    /**
     * Mock data
     * Format: timestamp area city userid adid
     * i.e. a point in time, a region, a city, a user, an ad
     * e.g. 1604229363531 华北 北京 3 3
     */
    def generateMockData(): Array[String] = {

        val array: ArrayBuffer[String] = ArrayBuffer[String]()

        val CityRandomOpt = RandomOptions(
            RanOpt(CityInfo(1, "北京", "华北"), 30),
            RanOpt(CityInfo(2, "上海", "华东"), 30),
            RanOpt(CityInfo(3, "广州", "华南"), 10),
            RanOpt(CityInfo(4, "深圳", "华南"), 20),
            RanOpt(CityInfo(5, "天津", "华北"), 10)
        )

        val random = new Random()

        // Generate mock real-time records:
        // timestamp area city userid adid
        for (i <- 0 to 50) {

            val timestamp: Long = System.currentTimeMillis()
            val cityInfo: CityInfo = CityRandomOpt.getRandomOpt
            val city: String = cityInfo.city_name
            val area: String = cityInfo.area
            val adid: Int = 1 + random.nextInt(6)
            val userid: Int = 1 + random.nextInt(6)

            // Assemble one record: timestamp area city userid adid
            array += timestamp + " " + area + " " + city + " " + userid + " " + adid
        }

        array.toArray
    }

    def main(args: Array[String]): Unit = {

        // Read the Kafka settings from config.properties
        val config: Properties = PropertiesUtil.load("config.properties")
        val brokers: String = config.getProperty("kafka.broker.list")
        val topic: String = config.getProperty("kafka.topic")

        // Create the producer configuration object
        val prop = new Properties()

        // Add producer properties
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

        // Create the Kafka producer from the configuration
        val kafkaProducer: KafkaProducer[String, String] = new KafkaProducer[String, String](prop)

        while (true) {

            // Generate mock records and send them to the Kafka cluster via the producer
            for (line <- generateMockData()) {
                kafkaProducer.send(new ProducerRecord[String, String](topic, line))
                println(line)
            }

            Thread.sleep(2000)
        }
    }
}

MyKafkaUtil utility class

package com.douglas.util

import java.util.Properties
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

object MyKafkaUtil {

    // 1. Load the configuration object
    private val properties: Properties = PropertiesUtil.load("config.properties")

    // 2. Broker addresses used to connect to the cluster
    private val brokers: String = properties.getProperty("kafka.broker.list")

    // Create a DStream that returns the received input data
    // LocationStrategies: create consumers for the given topics and cluster addresses
    // LocationStrategies.PreferConsistent: distribute partitions consistently across all executors
    // ConsumerStrategies: choose how Kafka consumers are created and configured on the driver and executors
    // ConsumerStrategies.Subscribe: subscribe to a collection of topics
    def getKafkaStream(topic: String, ssc: StreamingContext): InputDStream[ConsumerRecord[String, String]] = {

        // 3. Kafka consumer configuration
        val kafkaParam = Map(
            "bootstrap.servers" -> brokers,
            "key.deserializer" -> classOf[StringDeserializer],
            "value.deserializer" -> classOf[StringDeserializer],
            "group.id" -> "commerce-consumer-group" //消费者组
        )

        val dStream: InputDStream[ConsumerRecord[String, String]] = KafkaUtils.createDirectStream[String, String](
            ssc,
            LocationStrategies.PreferConsistent,
            ConsumerStrategies.Subscribe[String, String](Array(topic), kafkaParam)
        )
        dStream
    }
}
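
A hypothetical driver sketch showing how getKafkaStream can be wired into a Spark Streaming application; the RealTimeApp name, the com.douglas.app package, the local[*] master and the 5-second batch interval are assumptions, only the topic key comes from config.properties:

package com.douglas.app

import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import com.douglas.util.{MyKafkaUtil, PropertiesUtil}

object RealTimeApp {

    def main(args: Array[String]): Unit = {

        val sparkConf = new SparkConf().setMaster("local[*]").setAppName("RealTimeApp")
        val ssc = new StreamingContext(sparkConf, Seconds(5))

        // Topic name from config.properties
        val topic: String = PropertiesUtil.load("config.properties").getProperty("kafka.topic")

        // Direct stream created through MyKafkaUtil
        val kafkaDStream: InputDStream[ConsumerRecord[String, String]] = MyKafkaUtil.getKafkaStream(topic, ssc)

        // Each record value is one mock line: "timestamp area city userid adid"
        val lineDStream: DStream[String] = kafkaDStream.map(_.value())
        lineDStream.print()

        ssc.start()
        ssc.awaitTermination()
    }
}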

JDBCUtil utility class

package com.douglas.util

import java.sql.{Connection, PreparedStatement, ResultSet}
import java.util.Properties

import com.alibaba.druid.pool.DruidDataSourceFactory
import javax.sql.DataSource

object JDBCUtil {

    // Initialize the connection pool
    var dataSource: DataSource = init()

    // Connection pool initialization method
    def init(): DataSource = {
        
        val properties = new Properties()
        val config: Properties = PropertiesUtil.load("config.properties")
        
        properties.setProperty("driverClassName", "com.mysql.jdbc.Driver")
        properties.setProperty("url", config.getProperty("jdbc.url"))
        properties.setProperty("username", config.getProperty("jdbc.user"))
        properties.setProperty("password", config.getProperty("jdbc.password"))
        properties.setProperty("maxActive", config.getProperty("jdbc.datasource.size"))
        
        DruidDataSourceFactory.createDataSource(properties)
    }

    // Get a MySQL connection
    def getConnection: Connection = {
        dataSource.getConnection
    }

    // Execute an SQL statement: single-row insert or update
    def executeUpdate(connection: Connection, sql: String, params: Array[Any]): Int = {

        var rtn = 0
        var pstmt: PreparedStatement = null

        try {
            connection.setAutoCommit(false)
            pstmt = connection.prepareStatement(sql)

            if (params != null && params.length > 0) {
                for (i <- params.indices) {
                    pstmt.setObject(i + 1, params(i))
                }
            }

            rtn = pstmt.executeUpdate()

            connection.commit()
            pstmt.close()
        } catch {
            case e: Exception => e.printStackTrace()
        }
        
        rtn
    }

    // Check whether a row exists
    def isExist(connection: Connection, sql: String, params: Array[Any]): Boolean = {
        
        var flag: Boolean = false
        var pstmt: PreparedStatement = null

        try {
            pstmt = connection.prepareStatement(sql)

            for (i <- params.indices) {
                pstmt.setObject(i + 1, params(i))
            }

            flag = pstmt.executeQuery().next()
            pstmt.close()
        } catch {
            case e: Exception => e.printStackTrace()
        }

        flag
    }

    // Fetch a single value from MySQL
    def getDataFromMysql(connection: Connection, sql: String, params: Array[Any]): Long = {

        var result: Long = 0L
        var pstmt: PreparedStatement = null

        try {
            pstmt = connection.prepareStatement(sql)
            
            for (i <- params.indices) {
                pstmt.setObject(i + 1, params(i))
            }

            val resultSet: ResultSet = pstmt.executeQuery()

            while (resultSet.next()) {
                result = resultSet.getLong(1)
            }

            resultSet.close()
            pstmt.close()
        } catch {
            case e: Exception => e.printStackTrace()
        }

        result
    }

    // Main method for testing the methods above
    def main(args: Array[String]): Unit = {

        // 1. Get a connection
        val connection: Connection = getConnection

        // 2. Prepare the SQL statement
        val statement: PreparedStatement = connection.prepareStatement("select * from user_ad_count where userid = ?")

        // 3. Bind parameters
        statement.setObject(1, "a")

        // 4. Execute the query
        val resultSet: ResultSet = statement.executeQuery()

        // 5. Read the results
        while (resultSet.next()) {
            println("111:" + resultSet.getString(1))
        }

        // 6. Close resources
        resultSet.close()
        statement.close()
        connection.close()
    }
}
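
The helpers above are typically combined into an insert-or-update pattern. A hypothetical sketch against the user_ad_count table used in main; the count column and the exact SQL statements are assumptions, not from the original post:

// Hypothetical insert-or-update sketch; the count column is assumed
val connection = JDBCUtil.getConnection

if (JDBCUtil.isExist(connection, "select * from user_ad_count where userid = ?", Array[Any]("1"))) {
    // The row exists: accumulate the count
    JDBCUtil.executeUpdate(connection, "update user_ad_count set count = count + ? where userid = ?", Array[Any](10, "1"))
} else {
    // The row does not exist yet: insert it
    JDBCUtil.executeUpdate(connection, "insert into user_ad_count (userid, count) values (?, ?)", Array[Any]("1", 10))
}

connection.close()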