def reduce(): Unit = {
  val conf = new SparkConf().setAppName("reduce").setMaster("local")
  val sc = new SparkContext(conf)
  val numbersArray = Array(1, 2, 3, 4, 5, 6, 7, 8)
  val numberRDD = sc.parallelize(numbersArray, 1)
  // reduce aggregates all elements of the RDD with the given binary operator
  val sum = numberRDD.reduce(_ + _)
  println(sum) // 36
  sc.stop()
}
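A closely related action is fold, which behaves like reduce but starts the aggregation from an explicit zero value. Below is a minimal sketch in the same style as the surrounding methods (the method name and sample values are illustrative, and the same SparkConf/SparkContext imports are assumed):

def fold(): Unit = {
  val conf = new SparkConf().setAppName("fold").setMaster("local")
  val sc = new SparkContext(conf)
  val numberRDD = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8), 1)
  // fold is like reduce, but the aggregation starts from the given zero value
  val sum = numberRDD.fold(0)(_ + _)
  println(sum) // 36
  sc.stop()
}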
def collect(): Unit = {
  val conf = new SparkConf().setAppName("collect").setMaster("local")
  val sc = new SparkContext(conf)
  val numbersArray = Array(1, 2, 3, 4, 5, 6, 7, 8)
  val numberRDD = sc.parallelize(numbersArray, 1)
  val doubledNumbers = numberRDD.map(num => num * 2)
  // collect pulls the entire RDD back to the driver as a local array
  val doubledNumberArray = doubledNumbers.collect()
  for (num <- doubledNumberArray) {
    println(num)
  }
  sc.stop()
}
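Note that collect copies the whole RDD into the driver's memory; on a large dataset this can easily cause an OutOfMemoryError, so for a quick look at a few records the take action shown below is usually the safer choice.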
def count(): Unit = {
  val conf = new SparkConf().setAppName("count").setMaster("local")
  val sc = new SparkContext(conf)
  val numbersArray = Array(1, 2, 3, 4, 5, 6, 7, 8)
  val numberRDD = sc.parallelize(numbersArray, 1)
  // count returns the total number of elements in the RDD
  val count = numberRDD.count()
  println(count) // 8
  sc.stop()
}
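A close relative of count is countByValue, which reports how many times each distinct element occurs. A minimal sketch in the same style (names and sample data are illustrative):

def countByValue(): Unit = {
  val conf = new SparkConf().setAppName("countByValue").setMaster("local")
  val sc = new SparkContext(conf)
  val numberRDD = sc.parallelize(Array(1, 2, 2, 3, 3, 3), 1)
  // countByValue returns a local Map from each distinct element to its count
  val counts = numberRDD.countByValue()
  println(counts) // e.g. Map(1 -> 1, 2 -> 2, 3 -> 3)
  sc.stop()
}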
def take(): Unit = {
  val conf = new SparkConf().setAppName("take").setMaster("local")
  val sc = new SparkContext(conf)
  val numbersArray = Array(1, 2, 3, 4, 5, 6, 7, 8)
  val numberRDD = sc.parallelize(numbersArray, 1)
  // take returns the first n elements of the RDD to the driver
  val firstThree = numberRDD.take(3)
  for (num <- firstThree) {
    println(num)
  }
  sc.stop()
}
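take returns the first n elements in partition order, without sorting them. When the smallest elements are wanted instead, takeOrdered sorts first; a minimal sketch (names and sample data are illustrative):

def takeOrdered(): Unit = {
  val conf = new SparkConf().setAppName("takeOrdered").setMaster("local")
  val sc = new SparkContext(conf)
  val numberRDD = sc.parallelize(Array(5, 8, 1, 3), 1)
  // takeOrdered returns the n smallest elements, in ascending order
  val smallest = numberRDD.takeOrdered(2)
  smallest.foreach(println) // prints 1 then 3
  sc.stop()
}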
6.1 Java
private static void countByKey() {
  // Create the SparkConf
  SparkConf conf = new SparkConf()
      .setAppName("countByKey")
      .setMaster("local");
  // Create the JavaSparkContext
  JavaSparkContext sc = new JavaSparkContext(conf);
  // Build the source collection
  List<Tuple2<String, String>> scoresList = Arrays.asList(
      new Tuple2<>("class1", "tom"),
      new Tuple2<>("class2", "jack"),
      new Tuple2<>("class1", "leo"),
      new Tuple2<>("class2", "marry"));
  // Parallelize the collection into a JavaPairRDD
  JavaPairRDD<String, String> students = sc.parallelizePairs(scoresList);
  // Apply countByKey to count the students in each class, i.e. the number of
  // elements per key; it returns a Map<String, Long> directly
  Map<String, Long> studentCounts = students.countByKey();
  for (Map.Entry<String, Long> studentCount : studentCounts.entrySet()) {
    System.out.println(studentCount.getKey() + ":" + studentCount.getValue());
  }
  // Close the JavaSparkContext
  sc.close();
}
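With the four records above, this prints class1:2 and class2:2 (the iteration order of the map is not guaranteed).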
6.2 Scala
def countByKey(): Unit = {
  val conf = new SparkConf().setAppName("countByKey").setMaster("local")
  val sc = new SparkContext(conf)
  val studentList = Array(("class1", "aaa"), ("class2", "mack"),
    ("class1", "tom"), ("class2", "pos"))
  val scores = sc.parallelize(studentList, 1)
  // countByKey returns a local Map from each key to the number of elements with that key
  val studentCounts = scores.countByKey()
  println(studentCounts)
  sc.stop()
}
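Unlike the Java version, which loops over the entries, this prints the whole result map at once, e.g. Map(class1 -> 2, class2 -> 2).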