package bigdata
import java.util

import bean.Row2ColFieldBean
import com.alibaba.fastjson.JSON
import org.apache.spark.sql.{Column, DataFrame, SparkSession}
import utils.SparkUtils

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
object Row2Column {
def change(df: DataFrame,spark:SparkSession,json:String):Unit={
val field: Row2ColFieldBean = JSON.parseObject(json).toJavaObject(classOf[Row2ColFieldBean])
val groupFieldList: util.List[String] = field.getGroupField
val changeField: String = field.getChangeField
val valueField: util.List[String] = field.getValueField
df.groupBy(groupFieldList.map(str => df.col(str.trim)): _*)
.pivot(changeField)
.sum(valueField.map(str => str.trim): _*).show()
}
def main(args: Array[String]): Unit = {
val spark: SparkSession = SparkUtils.getSparkSession()
val json = "{\n \"groupField\":[\"name\",\"id\"],\n \"changeField\":\"subject\",\n \"valueField\":[\"score\"]\n}"
val df: DataFrame = SparkUtils.readMySQL(spark)
change(df,spark,json:String)
}
}
package utils
import org.apache.spark.sql.{DataFrame, SparkSession}
object SparkUtils {
def getSparkSession():SparkSession={
SparkSession.builder()
.appName("Connect")
.master("local[*]")
.enableHiveSupport()
.getOrCreate()
}
def readMySQL(spark:SparkSession):DataFrame={
spark.read.format("jdbc")
.option("url", "jdbc:mysql://localhost:3306/bigdata")
.option("user", "root")
.option("password", "root")
.option("dbtable", "studentscore")
.load()
}
}