// Call the create method to obtain a DataFrame
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{LongType, StringType, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession, types}
/**
 * 1. A DataFrame can be built by calling the create method
 *    (JavaBean + reflection).
 */
object _01DFCreatMethod {
// Entry point: builds a local SparkSession, constructs a list of Student
// beans, and (in code continuing past this view) creates a DataFrame
// from that list.
def main(args: Array[String]): Unit = {
// 1. Obtain a SparkSession running in local mode on all available cores.
//    NOTE(review): appName is empty — consider giving the job a real name.
val session: SparkSession = SparkSession
.builder()
.appName("")
.master("local[*]")
.getOrCreate()
// 2. Build sample data: a Scala List of Student instances.
//    NOTE(review): Student is declared elsewhere in the project (not visible
//    here); per the header comment it is presumably a JavaBean with
//    (id, name, gender) fields — confirm against its definition.
//    NOTE(review): `var` could be `val` — the list is never reassigned in
//    the visible code.
var list = List(new Student("1","lisi","m"),
new Student("2","l","m"),
new Student("1","ff","f")
)
// 3. Bring implicit conversions into scope:
//    - session.implicits._ supplies encoders for DataFrame creation;
//    - JavaConversions supplies implicit Java <-> Scala collection interop.
//      NOTE(review): scala.collection.JavaConversions is deprecated; prefer
//      scala.collection.JavaConverters with explicit .asJava/.asScala.
import session.implicits._
import scala.collection.JavaConversions._
// NOTE(review): the line below is truncated in this view — the expression
// continues beyond the visible SOURCE (likely session.createDataFrame(...)).
val df: DataFrame = ses