Getting started with java-spark:
- Text file to RDD
- RDD to List
- RDD to other data types

The complete example below runs in local mode and walks through each step:

import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;

import scala.Tuple2;
public class Test {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setAppName("local_Test");
        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        conf.setMaster("local");
        SparkContext sc = SparkContext.getOrCreate(conf);
        JavaSparkContext jsc = JavaSparkContext.fromSparkContext(sc);

        // 1. Text file to RDD, with deduplication
        JavaRDD<String> fileRDD = jsc.textFile("C:\\Users\\A\\Desktop\\0608.txt").distinct();

        // 2. RDD to List
        List<String> ids = fileRDD.collect();
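        // Note (added sketch): collect() pulls the entire RDD back to the
        // driver; for large inputs, bounded alternatives exist, e.g.:
        // List<String> firstTen = fileRDD.take(10); // first 10 elements only
        // long total = fileRDD.count();             // size without the data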

        // 3. RDD to another data type: a pair RDD of (line, derived value)
        JavaPairRDD<String, String> mapToPair = fileRDD.mapToPair(new PairFunction<String, String, String>() {
            private static final long serialVersionUID = -6390835853130436409L;

            @Override
            public Tuple2<String, String> call(String t) throws Exception {
                return new Tuple2<String, String>(t, t + "_xxxxx_hhhhh");
            }
        });
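        // Sketch: a pair RDD can also come back as a java.util.Map via
        // collectAsMap(); this assumes keys are unique, since duplicate
        // keys keep only one value:
        // Map<String, String> asMap = mapToPair.collectAsMap();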

        // 4. Operate on the new RDD: print each key/value pair
        mapToPair.foreach(new VoidFunction<Tuple2<String, String>>() {
            private static final long serialVersionUID = 7209851065087216424L;

            @Override
            public void call(Tuple2<String, String> t) throws Exception {
                String key = t._1;
                String value = t._2;
                System.out.println(key + "__" + value);
            }
        });

        jsc.close();
    }
}
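
With Java 8 or later, the anonymous inner classes above can be replaced by lambdas, since PairFunction and VoidFunction are functional interfaces. A minimal sketch of steps 3 and 4 in that style, reusing the fileRDD built above (same behavior; note that in local mode the output prints to the driver console, while on a cluster it would land in the executor logs):

// Step 3 with a lambda: build the (line, derived value) pair RDD
JavaPairRDD<String, String> pairs = fileRDD.mapToPair(s -> new Tuple2<>(s, s + "_xxxxx_hhhhh"));
// Step 4 with a lambda: print each pair
pairs.foreach(t -> System.out.println(t._1 + "__" + t._2));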