1.filter:使用一个布尔函数为RDD的每个数据项计算,并将函数返回true的项放入生成的RDD中。
package com.cb.spark.sparkrdd;
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
public class FilterExample {
    /**
     * Demonstrates RDD.filter: the predicate runs once per element and only
     * elements for which it returns true are kept in the resulting RDD.
     */
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Filter").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<String> animals = sc.parallelize(Arrays.asList("cat", "dog", "pig", "mouse", "cat", "Rat", "dog"));
        // Expected output: cat dog pig cat Rat dog  ("mouse" has length 5 and is filtered out)
        animals.filter(word -> word.length() == 3)
               .foreach(word -> System.out.print(word + " "));
        sc.stop();
    }
}
2.filterByRange:该函数作用于键值对RDD,对RDD中的元素进行过滤,返回键在指定范围中的元素。
package com.cb.spark.rdd
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
object FilterByRange {
  /**
   * Demonstrates `filterByRange` on a pair RDD: returns only the elements
   * whose key lies within the given inclusive range.
   *
   * Note: `filterByRange` comes from `OrderedRDDFunctions`, so it requires
   * an implicit `Ordering` on the key type (available for `String`).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("FilterByRange")
    val sc = new SparkContext(conf)
    // Keys "c".."e" (inclusive) survive; ("a", 1) is dropped.
    val rdd1 = sc.parallelize(List(("e", 5), ("c", 3), ("d", 4), ("c", 2), ("a", 1)))
    rdd1.filterByRange("c", "e").foreach(println)
    // Release Spark resources — the context was previously leaked (never stopped).
    sc.stop()
  }
}