RDD 很容易生成，但是有很多操作必须用 PairRDD（键值对 RDD）来完成。
那么怎么生成 PairRDD 呢？主要是通过 Scala 的隐式转换：对元素为二元组 (K, V) 的 RDD，Spark 会隐式地提供 PairRDDFunctions 中的键值对操作。
数据文件 pair_test 的内容如下。
（说明：下面两行是 data_pair_2.keys.take(5) 的运行结果，data_pair_2 的定义见后文 shell 演示部分。）
scala> data_pair_2.keys.take(5)
res12: Array[(String, String)] = Array((10,100), (11,100), (12,100), (13,100), (14,100))
10,100,12,201505
11,100,12,201505
12,100,12,201505
13,100,12,201505
14,100,12,201505
15,100,12,201505
Spark shell 下的代码演示：
scala> val data=sc.textFile("/home/wangtuntun/pair_test").map(_.split(","))
data: org.apache.spark.rdd.RDD[Array[String]] = MapPartitionsRDD[10] at map at <console>:27
scala> val data_pair_1=data.map( x=>( (x(0)),(x(1),x(2),x(3)) ) )
data_pair_1: org.apache.spark.rdd.RDD[(String, (String, String, String))] = MapPartitionsRDD[11] at map at <console>:29
scala> data_pair_1.keys.take(5)
res10: Array[String] = Array(10, 11, 12, 13, 14)
scala> val data_pair_2=data.map( x=> ( (x(0),x(1)),(x(2),x(3)) ) )
data_pair_2: org.apache.spark.rdd.RDD[((String, String), (String, String))] = MapPartitionsRDD[15] at map at <console>:29