Cleaning CSV Data Files with DataFrame

package march.sql

import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions._

/**
  * Description: merge house-listing CSV files from four cities and clean them into one tidy DataFrame
  *
  * @Author: 留歌36
  * @Date: 2019/3/6 8:57
  */
object ChengduHouseAPP {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName(this.getClass.getSimpleName).master("local[2]").getOrCreate()
    // implicit conversions (e.g. the $"col" syntax)
    import spark.implicits._

    val path = "f:\\data\\chengdu_house.csv"
    val path2 = "f:\\data\\hangzhou_house.csv"
    val path3 = "f:\\data\\shanghai_house.csv"
    val path4 = "f:\\data\\kunming_house.csv"
    // read each city's CSV with a header row, letting Spark infer the column types
    val DF = spark.read.option("header", "true").option("inferSchema", "true").csv(path)
    val DF2 = spark.read.option("header", "true").option("inferSchema", "true").csv(path2)
    val DF3 = spark.read.option("header", "true").option("inferSchema", "true").csv(path3)
    val DF4 = spark.read.option("header", "true").option("inferSchema", "true").csv(path4)

    // union matches columns by position, so project every input (including DF itself)
    // to the same column order first; a name-based alternative is sketched after the code
    val DF5 = DF
      .select("house_info","region","publishday","visited","attention","total_price","unit_price","url")
      .union(DF2.select("house_info","region","publishday","visited","attention","total_price","unit_price","url"))
      .union(DF3.select("house_info","region","publishday","visited","attention","total_price","unit_price","url"))
      .union(DF4.select("house_info","region","publishday","visited","attention","total_price","unit_price","url"))

    println("chengdu_houseDF:"+DF.count())
    println("hangzhou_houseDF2:"+DF2.count())
    println("shanghai_houseDF3:"+DF3.count())
    println("kunming_houseDF4:"+DF4.count())
    println("AllDF5:"+DF5.count())

    val DF6 = DF5
      .drop("url")
      .drop("publishday")
      // keep only rows where the key fields are present; reference columns with
      // col(...) rather than DF.col(...), since DF6 derives from DF5, not DF
      .filter(col("region").isNotNull)
      .filter(col("unit_price").isNotNull)
      //      .filter(col("visited").isNotNull)
      //      .filter(col("attention").isNotNull)
      //      .filter(size(split(col("house_info"), "\\|")) === 6)
      // house_info holds six "|"-delimited segments (hence the commented-out size check),
      // e.g. "某小区 | 3室2厅 | 120平米 | 南 | 精装 | 有电梯" (format inferred from the offsets);
      // item 1 is " 3室2厅", so substr(2, 1) is the room count and substr(4, 1) the hall count
      .withColumn("rooms", split(col("house_info"), "\\|").getItem(1).substr(2, 1))
      .withColumn("halls", split(col("house_info"), "\\|").getItem(1).substr(4, 1))
      .withColumn("area", split(col("house_info"), "\\|").getItem(2))
      .withColumn("towards", split(col("house_info"), "\\|").getItem(3))
      .withColumn("decoration", split(col("house_info"), "\\|").getItem(4))
      .withColumn("have_elevator", split(col("house_info"), "\\|").getItem(5))
      .drop("house_info")

    val DF7 = DF6.select("region", "rooms", "halls", "towards", "decoration", "have_elevator",
      "visited", "attention", "unit_price", "area", "total_price")

    DF7.show(10,false)
    println(DF7.count())
    // coalesce(1) writes a single CSV file instead of one file per partition;
    // a read-back check is sketched after the code
    DF7.coalesce(1).write.option("header", "true").mode(SaveMode.Overwrite).csv("f:\\data\\logout\\")
    spark.stop()
  }
//  a possible extraction of the null filtering, fixed up from the original stub:
//  private def noNull(df: DataFrame, column: String) =
//    df.filter(col(column).isNotNull)


}
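
Side note: union in the code above matches columns by position, which is why every input DataFrame is projected to the same column order first. On Spark 2.3 and later, unionByName matches columns by name instead, which removes the ordering concern. A minimal sketch, reusing the DataFrame names from the code above:

    val DF5 = DF
      .unionByName(DF2)
      .unionByName(DF3)
      .unionByName(DF4)

Note that unionByName still requires all inputs to have the same set of columns; it only relaxes the ordering requirement.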

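To sanity-check the written output, the cleaned CSV can be read back with the same reader options and inspected. A minimal sketch, assuming a live SparkSession named spark and the output path from the code above:

    // read the cleaned file back and confirm the schema and a few rows look right
    val cleaned = spark.read.option("header", "true").option("inferSchema", "true").csv("f:\\data\\logout\\")
    cleaned.printSchema()
    cleaned.show(5, false)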