// Create the SparkSession
// (cluster mode, with Hive support)
// Cluster SparkSession: Hive support enabled, with the Hive partition-file
// metadata cache capped at 500 MiB (value is in bytes).
// NOTE(review): "spark.some.config.option" -> "some-value" is placeholder
// boilerplate copied from the Spark docs — confirm it is intentional or drop it.
val spark = SparkSession.builder()
  .appName("PlaceCapacity") // fixed: removed stray leading space from the app name
  .config("spark.some.config.option", "some-value")
  .config("spark.sql.hive.filesourcePartitionFileCacheSize", 500 * 1024 * 1024)
  .enableHiveSupport()
  .getOrCreate()
// Local-test variant (use one of the two SparkSession definitions at a time)
val spark = SparkSession.builder().appName(" PlaceCapacity").master("local[2]").getOrCreate()
// Import implicit conversions (needed for flatMap result encoders and RDD <-> DataFrame conversion)
import spark.implicits._
// Without this import the compiler reports errors about missing implicits (e.g. encoders)
// Write the data to Redis, one pipelined connection per partition
data_to_redis.foreachPartition(x => {
val jedis = RedisSentinelCluster.bulider()
try {
val pipeline = jedis.pipelined()
while (x.hasNext) {
val row = x.next()
val key = "route:"+row.getAs("key").toString