package com.zyc.sparksql
import java.text.SimpleDateFormat
import java.util.Locale
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
/**
 * Created with IntelliJ IDEA.
 * Author: zyc2913@163.com
 * Date: 2020/10/10 13:55
 * Version: 2.0
 * Description: Mutual conversion between RDD, DataFrame (DF), and Dataset (DS) in Spark SQL.
 */
object SparkSqlDemo1 {
def main(args: Array[String]): Unit = {
//1.获取SparkSql程序入口
val spark = SparkSession.builder()
.appName("sparkSql")
.master("local[2]")
.getOrCreate()
//导入隐式转换
import spark.implicits._
val sc = spark.sparkContext
//2.创建dataframe实例
//2.1日志处理案例
//2.1.1导入日志文件,转为RDD
val rdd:RDD[String] = sc.textFile("C:\\Users\\Administrator\\Desktop\\book\\logs\\access.log-2019092