The original code is as follows:
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.log4j.{Level, Logger}

object hello {
  def main(args: Array[String]): Unit = {
    // Reduce log noise from Hadoop and Spark
    Logger.getLogger("org.apache.hadoop").setLevel(Level.WARN)
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.project-spark").setLevel(Level.WARN)

    val conf = new SparkConf().setMaster("spark://Desktop:7077").setAppName("appName")
    val sc = new SparkContext(conf)

    // The elements are 3-tuples, not (key, value) pairs
    val rdd2 = sc.makeRDD(Array((1, 1, "a"), (2, 2, "b"), (3, 3, "c")))
    val rdd1 = sc.makeRDD(Array((1, 1, 4), (2, 2, 5), (3, 3, 6)))
    println(rdd2.getClass.getSimpleName)

    // Does not compile: join is only defined for RDDs of (key, value) pairs
    // val rdd3 = rdd1.join(rdd2)

    println(rdd1.collect().mkString(", ")) // mkString so the elements print readably
  }
}
The cause is that the arrays above hold 3-element tuples (Tuple3). In Spark, join is only available on RDDs whose elements are 2-element (key, value) tuples, via the implicit conversion to PairRDDFunctions. Changing them to pairs makes the join work:
val rdd2 = sc.makeRDD(Array((1, "a"), (2, "b"), (3, "c")))
val rdd1 = sc.makeRDD(Array((1, "b"), (2, "c"), (3, "d")))
</val>
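For completeness, a minimal sketch of the corrected join, continuing from the two pair RDDs above and the SparkContext sc from the original code (the expected output is an assumption based on that data; ordering across partitions may vary):

// With (key, value) pairs, join matches rows on the first tuple element
val rdd3 = rdd1.join(rdd2)
println(rdd3.collect().mkString(", "))
// Expected output (order may differ): (1,(b,a)), (2,(c,b)), (3,(d,c))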