import org.apache.spark.sql.{
SparkSession, functions}
import org.apache.spark.sql.functions.{
array_contains, col, explode, size, split, struct}
import org.junit.Test
class complexType {
// Shared local SparkSession for every test in this suite.
// "spark.testing.memory" lowers the executor-memory floor (~450 MB) so the
// session can start on a small test heap.
val spark = new SparkSession.Builder()
  .master("local[6]") // run locally with 6 worker threads
  .appName("chemaDemo1")
  .config("spark.testing.memory", "471859200")
  .getOrCreate()
// One day of retail line items (2010-12-01). The file carries a header row,
// and column types are inferred from the data rather than declared up front.
val df = spark.read
  .format("csv")
  .options(Map("header" -> "true", "inferSchema" -> "true"))
  .load("data/retail-data/by-day/2010-12-01.csv")
@Test
def structDemo():Unit = {
    // Spark: working with complex data types (struct demo).
    // NOTE(review): the original method body was lost to page-scrape residue
    // (a CSDN publish-date line and banner image were pasted here); the bare
    // non-comment text would not compile, so it has been converted to comments.