import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;
import java.util.Arrays;
import java.util.Iterator;
/**
* @author Administrator
* @date 2020/8/4 0004 21:35
* @description
* JavaSparkContext:wordcount
*/
public class JavaWordCountTest {
public static void main(String[] args) {
SparkConf conf = new SparkConf().setAppName("JavaWordCountTest");
//创建初始JavaSparkContext
JavaSparkContext jsc = new JavaSparkContext(conf);
JavaRDD<String> lines = jsc.textFile(args[0]);
//切分,压平
//JavaRDD<String> word = lines.flatMap(w -> Arrays.stream(w.split(&#
// Notes: Spark word count.
// (Blog scrape metadata: latest recommended article published 2022-08-02 18:30:15.)