1.统一化管理配置
kafka.topic="topic5"
kafka.group.id="kafka_group"
kafka.broker.list="192.168.44.132:9092"
redis.host="192.168.44.132"
redis.db=1
redis.port=6379
kafka.checkpointdir="F:\\Bigdata\\checkpoint"
2.数据的产生:
这里模拟商城产生的订单数据
package streaming.write
import java.util
import java.util.{
Date, Random}
import org.apache.commons.lang3.time.FastDateFormat
import com.alibaba.fastjson.JSONObject
import com.alibaba.fastjson.JSON
import scala.collection.mutable.ListBuffer
// Mock-data generator: produces one simulated mall order record per call.
class data {
  // Shared Random reused across calls (avoids re-seeding on every record).
  val rd = new Random()

  /**
   * Build one simulated order as a fastjson JSONObject.
   *
   * Keys: "time" (yyyyMMddHHmmss timestamp), "userid", "courseid",
   * "fee" (price), "flag" ("0" or "1" — presumably paid/unpaid; confirm
   * which value means paid against the consumer).
   *
   * @return the populated JSONObject
   */
  def getdata: JSONObject = {
    // FastDateFormat is thread-safe, unlike java.text.SimpleDateFormat.
    val df = FastDateFormat.getInstance("yyyyMMddHHmmss").format(new Date)
    // `.toString` with an explicit dot: the original used deprecated
    // postfix-operator syntax (`nextInt(...) toString`), which needs
    // `scala.language.postfixOps` and fails on modern compilers.
    val userid = rd.nextInt(100000000).toString   // simulated user id
    val courseid = rd.nextInt(1000000).toString   // simulated course id
    val fee = rd.nextInt(1000).toString           // simulated order price
    // Elements are already Strings — the original's extra `toString` was a no-op.
    val flag = Array("0", "1")(rd.nextInt(2))     // simulated payment flag
    val mp = new util.HashMap[String, Object]()
    mp.put("time", df)
    mp.put("userid", userid)
    mp.put("courseid", courseid)
    mp.put("fee", fee)
    mp.put("flag", flag)
    // Last expression is the result — no `return` keyword needed in Scala.
    new JSONObject(mp)
  }
}
3.kafka生产者,将数据上传
package streaming.write
import java.io.File
import java.util
import com.typesafe.config.ConfigFactory
import org.apache.kafka.clients.producer.{
KafkaProducer, ProducerConfig, ProducerRecord}
object write {
def main(args: Array[String]) {
//配置统一化管理
val config = ConfigFactory.parseFile(new File("F:\\Bigdata\\scalawork\\spark-maven\\src\\main\\scala\\streaming\\stream.conf"))
// Kafka broker (bootstrap server) list — note: this is NOT a ZooKeeper address
val zkQuorum = config.getString("kafka.broker.list"