package com.fengling.sql
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.slf4j.LoggerFactory
case class People(name: String, age: Int)
/**
* @author fengchengliang@126.com
* @date 2019-10-17
*/
object SparkSQLDemo {
val logger = LoggerFactory.getLogger(SparkSQLDemo.getClass)
def main(args: Array[String]): Unit = {
val config = Map(
"spark.cores" -> "local[*]"
)
val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("Spark SQL basic example")
// Build the SparkSession (which creates the underlying SparkContext with the configured master and app name)
val spark = SparkSession.builder()
.config(sparkConf)
.getOrCreate()
// For implicit conversions like converting RDDs to DataFrames
import spark.implicits._
// The original listing is cut off here; the last age value (20) is an assumed placeholder
val rdd = spark.sparkContext.parallelize(List(People("张三", 12), People("李四", 18), People("王五", 20)))
  }
}
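Since the listing stops right after the RDD is created, here is a minimal sketch of how the main method above might continue with common DataFrame operations. It assumes the spark session, the rdd, and import spark.implicits._ from the listing; the name peopleDF and the specific queries are illustrative, not part of the original post.

// Assumed continuation inside main(): convert the RDD of case classes to a DataFrame
val peopleDF: DataFrame = rdd.toDF()
peopleDF.printSchema()   // inspect the inferred schema (name: string, age: int)
peopleDF.show()          // print the rows

// DSL-style queries on the DataFrame
peopleDF.select("name").show()
peopleDF.filter($"age" > 10).show()

// SQL-style queries via a temporary view
peopleDF.createOrReplaceTempView("people")
spark.sql("SELECT name, age FROM people WHERE age >= 18").show()

Both the DSL calls and the spark.sql query go through the same Catalyst optimizer, so the two styles can be mixed freely.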