Spark error: org.apache.spark.SparkException: Task not serializable
You can tell at a glance that something isn't serializable. In a Spark program, every class that ends up inside a task closure must be serializable; fields that should not (or cannot) be serialized are marked transient.
The fixed code:
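For reference, a minimal sketch of the failing pattern (the class name WordCountController and the input path are hypothetical): an anonymous function class carries an implicit reference to the enclosing instance, so Spark tries to serialize the whole controller, including the non-serializable JavaSparkContext field, and throws Task not serializable.

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Arrays;
import java.util.Iterator;

public class WordCountController {              // not Serializable
    @Autowired
    private JavaSparkContext sparkContext;      // not transient either

    public void count() {
        JavaRDD<String> lines = sparkContext.textFile("input.txt");
        // The anonymous class below captures WordCountController.this,
        // pulling the whole controller (and its fields) into the task closure
        lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) {
                return Arrays.asList(s.split("\\s")).iterator();
            }
        }).count();
    }
}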
package com.example.controller;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import scala.Tuple2;
import java.io.IOException;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
/**
 * @ClassName SparkTest
 * @Author yupanpan
 * @Date 2020/5/25 16:58
 */
@RestController
@RequestMapping("/spark")
public class SparkTest implements Serializable {
    @Autowired
    private transient JavaSparkContext sparkContext;

    @GetMapping("/test")
    public void test() throws IOException {
        // Create the RDD: read the text to be counted from a file
        JavaRDD<String> lines = sparkContext.textFile("D:\\yop_sdk_config_default.json");
        // Transformation: split each line into words on whitespace
        JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
            @Override
            public Iterator<String> call(String s) {
                return Arrays.asList(s.split("\\s")).iterator();
            }
        });
        // Transformation: map each word <String> to a <String, Integer> pair with an initial count of 1
        JavaPairRDD<String, Integer> ones = words.mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String s) {
                return new Tuple2<String, Integer>(s, 1);
            }
        });
        // Transformation: sum the counts per word (reduceByKey is lazy; collect() below triggers the job)
        JavaPairRDD<String, Integer> counts = ones.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer i1, Integer i2) {
                return i1 + i2;
            }
        });
        // Action: collect the whole RDD back to the driver
        List<Tuple2<String, Integer>> output = counts.collect();
        for (Tuple2<?, ?> tuple : output) {
            System.out.println(tuple._1() + ": " + tuple._2());
        }
        // close() simply delegates to stop(), so one call is enough
        sparkContext.stop();
    }
}
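As a side note (not part of the original fix): Java 8 lambdas that only use their own parameters do not capture the enclosing instance the way anonymous inner classes do, so rewriting the three functions as lambdas would also avoid dragging the controller into the task closure. A sketch of the same pipeline:

JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(s.split("\\s")).iterator());
JavaPairRDD<String, Integer> counts = words
        .mapToPair(s -> new Tuple2<>(s, 1))    // (word, 1)
        .reduceByKey(Integer::sum);            // sum counts per word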
This is only a test endpoint. The first problem was that the controller was not serializable; after making it implement Serializable, the error persisted. The remaining culprit was the JavaSparkContext member: when Spark serializes a task closure it also serializes the enclosing object's member fields. Since I had already created the context via @Configuration/@Bean and injected it, the sparkContext itself does not need to be serialized (a SparkContext is not serializable anyway), so marking the field transient solved the problem.
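For context, a minimal sketch of the @Configuration/@Bean setup mentioned above (the class name SparkConfig, the appName, and the local[*] master are assumptions; the original post does not show this class):

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class SparkConfig {

    // Build the singleton JavaSparkContext that gets @Autowired into the controller;
    // Spring calls stop() on it when the application shuts down
    @Bean(destroyMethod = "stop")
    public JavaSparkContext sparkContext() {
        SparkConf conf = new SparkConf()
                .setAppName("spark-test")   // assumed app name
                .setMaster("local[*]");     // assumed local mode for testing
        return new JavaSparkContext(conf);
    }
}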
Result: the error is gone and each word is printed with its count.