Spark Project sparkDemo (Part 7): SparkDemo Walkthrough

Next, let's look at how the individual query steps are split into separate methods and the results are then combined: readShadowSocks reads the login data for the target day, readHistory reads the registration data, retainAddData joins the two and aggregates them into retention statistics, and outJDBCData writes the retention data into MySQL.

package com.shadowsocks

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._

import com.util.DateUtil

object ReadTableCommon {
  /**
   * Read the distinct login records (login_time, ip_send) for the target day
   */
  def readShadowSocks(spark: SparkSession)(logTable: String, targetDay: String): DataFrame = {
    import spark.implicits._
    spark.read.table(logTable).where($"login_time" === targetDay)
      .select($"login_time", $"ip_send")
      .distinct()
  }

  /**
   * Read the registration records needed for the retention calculation:
   * registrations within the last 7 days, plus the days exactly 14 and 30 days before the target day
   */
  def readHistory(spark: SparkSession)(registerTable: String, targetDay: String): DataFrame = {
    val seven_day    = DateUtil.getDateByDay(targetDay, -7)
    val fourteen_day = DateUtil.getDateByDay(targetDay, -14)
    val thirty_day   = DateUtil.getDateByDay(targetDay, -30)
    import spark.implicits._
    spark.read.table(registerTable)
      .filter(($"register_time" <= targetDay && $"register_time" >= seven_day) || $"register_time" === fourteen_day || $"register_time" === thirty_day)
      .select($"id", $"ip_send", $"ip_end", $"register_time")
      .distinct()
  }

  /**
   * Retention statistics: join registrations with the target day's logins on ip_send,
   * then count the retained users per registration day
   */
  def retainAddData(spark: SparkSession)(targetDay: String, regTableDF: DataFrame, logTableDF: DataFrame): DataFrame = {
    import spark.implicits._
    regTableDF.join(logTableDF, Seq("ip_send"))
      .groupBy("register_time").agg(count("ip_send").as("retained_person_num"))
      .withColumn("target_day", when($"retained_person_num".isNotNull, targetDay))
      .select($"target_day", $"register_time".as("retained_day"), datediff($"target_day", $"register_time").as("retained_day_num"), $"retained_person_num")
  }

  /**
   * Write a DataFrame into a MySQL table via JDBC
   */
  def outJDBCData(spark: SparkSession)(url: String, dataDF: DataFrame, tableName: String): Unit = {
    // e.g. url = "jdbc:mysql://192.168.131.155:3306/hadoop?characterEncoding=UTF-8"
    val connectionProperties = new Properties()
    connectionProperties.setProperty("user", "root")     // MySQL user name
    connectionProperties.setProperty("password", "root") // MySQL password
    // dataDF.write.jdbc(url, "shadowsocks_retain", connectionProperties) // creates the table and inserts data (the table must not already exist)
    dataDF.write.mode("append").jdbc(url, tableName, connectionProperties) // append rows to an existing table
  }
  
  
}
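
The DateUtil helper from com.util is not shown in the post. Below is a minimal sketch of what getDateByDay might look like, assuming dates are plain "yyyy-MM-dd" strings and the second argument is an offset in days; this is an assumption, not the original helper.

package com.util

import java.time.LocalDate
import java.time.format.DateTimeFormatter

// Hypothetical sketch of the DateUtil helper referenced above (the original is not included in the post).
object DateUtil {
  private val fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd")

  /** Return the date `days` days after `day`; negative values go back in time. */
  def getDateByDay(day: String, days: Int): String =
    LocalDate.parse(day, fmt).plusDays(days).format(fmt)
}

The driver object below parses the four job arguments, wires the steps together, and writes the result to MySQL:
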
package com.shadowsocks

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{DateType, IntegerType, LongType, StructType, TimestampType}
object Shadowsocks {
  def main(args: Array[String]): Unit = {
    // Expected arguments: <targetDay> <registerTable> <logTable> <jdbcUrl>
    val Array(targetDay, registerTable, logTable, url) = args
    val spark = SparkSession.builder().appName("shadowsocks").enableHiveSupport().getOrCreate()
    val logTableDF = ReadTableCommon.readShadowSocks(spark)(logTable, targetDay)  // logins on the target day
    val regTableDF = ReadTableCommon.readHistory(spark)(registerTable, targetDay) // registrations to check for retention
    val retainDF   = ReadTableCommon.retainAddData(spark)(targetDay, regTableDF, logTableDF)
    // Layout of the target MySQL table, kept here for reference only
    // (id and create_time are generated by the database, not by this job).
    val targetSchema = new StructType()
      .add("id", LongType, false)
      .add("target_day", DateType, true)
      .add("retained_day", DateType, true)
      .add("retained_day_num", IntegerType, true)
      .add("retained_person_num", IntegerType, true)
      .add("create_time", TimestampType, false)
    ReadTableCommon.outJDBCData(spark)(url, retainDF, "shadowsocks_retain")
    spark.close()
  }
}
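
To make the join and aggregation concrete, here is a small local sketch that feeds retainAddData two hand-built DataFrames instead of Hive tables. The sample rows, the local[*] master and the RetainLocalDemo object name are illustrative assumptions, not part of the original job.

package com.shadowsocks

import org.apache.spark.sql.SparkSession

// Illustrative local run of retainAddData with hand-built data (assumed sample rows).
object RetainLocalDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("retain-demo").getOrCreate()
    import spark.implicits._

    val targetDay = "2019-06-08"

    // Registrations: (id, ip_send, ip_end, register_time)
    val regDF = Seq(
      (1L, "10.0.0.1", "10.0.0.9", "2019-06-01"),
      (2L, "10.0.0.2", "10.0.0.9", "2019-06-01"),
      (3L, "10.0.0.3", "10.0.0.9", "2019-06-07")
    ).toDF("id", "ip_send", "ip_end", "register_time")

    // Logins on the target day: (login_time, ip_send)
    val logDF = Seq(
      (targetDay, "10.0.0.1"),
      (targetDay, "10.0.0.3")
    ).toDF("login_time", "ip_send")

    // Prints one row per register_time with the number of users still logging in on the target day
    ReadTableCommon.retainAddData(spark)(targetDay, regDF, logDF).show(false)

    spark.stop()
  }
}

In production the job is submitted with four arguments, in this order: the target day, the Hive registration table, the Hive login table, and the MySQL JDBC URL (see the Array(targetDay, registerTable, logTable, url) pattern match in main).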

 
