import org.apache.spark.sql.functions._
val newDataFrame = dataFrame.withColumn("id", monotonically_increasing_id())
This only adds an id column; the generated ids are monotonically increasing and unique, but not consecutive (they jump between partitions), so they cannot serve as a 1-to-N row index.
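A minimal sketch of that gap behavior, assuming a local SparkSession and a small hypothetical input named dataFrame:
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.monotonically_increasing_id
val spark = SparkSession.builder().master("local[4]").appName("mono-id-demo").getOrCreate()
import spark.implicits._
// hypothetical data spread over several partitions
val dataFrame = Seq("a", "b", "c", "d", "e", "f").toDF("value").repartition(3)
// ids rise within and across partitions but leave large gaps,
// e.g. 0, 1, 8589934592, 8589934593, ... (partition id in the high bits)
dataFrame.withColumn("id", monotonically_increasing_id()).show(false)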
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.row_number
// a window with no partitionBy moves all rows into a single partition,
// so this is only suitable for DataFrames of modest size
val w = Window.orderBy("count")
val result = df.withColumn("index", row_number().over(w))
This works: row_number() assigns consecutive values from 1 up to df.count().
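A self-contained sketch of the same approach, assuming a hypothetical word-count style df that already has a "count" column:
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.row_number
val spark = SparkSession.builder().master("local[*]").appName("row-number-demo").getOrCreate()
import spark.implicits._
// hypothetical input with a "count" column
val df = Seq(("spark", 10), ("scala", 3), ("flink", 7)).toDF("word", "count")
val w = Window.orderBy("count")
val result = df.withColumn("index", row_number().over(w))
// index follows ascending count: scala(3) -> 1, flink(7) -> 2, spark(10) -> 3
result.show()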