![](https://img-blog.csdnimg.cn/20190718094231137.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L3dlaXhpbl80MjAzNDIxNw==,size_16,color_FFFFFF,t_70)
### ApplicationConfiguration
package com.example.demo;
import java.io.Serializable;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.stereotype.Component;
@Component
public class ApplicationConfiguration implements Serializable {

    private static final long serialVersionUID = 1L;

    // Default input file; retained from the original hard-coded value so existing
    // behavior is unchanged when no override is supplied.
    private static final String DEFAULT_FILE_PATH = "C:\\Users\\wen\\Desktop\\javawb\\aa.txt";

    /**
     * Builds the Spark configuration for the local word-count demo job.
     *
     * @return a SparkConf running locally on all cores with Kryo serialization
     */
    public SparkConf sparkconf() {
        return new SparkConf()
                .setMaster("local[*]")
                .setAppName("wc")
                // Raises Spark's minimum-memory check so it can start inside a small JVM.
                .set("spark.testing.memory", "2147480000")
                // Kryo is more compact/faster than default Java serialization.
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    }

    /**
     * Creates a brand-new JavaSparkContext on every call.
     * NOTE(review): only one SparkContext may exist per JVM at a time, so the
     * caller is responsible for closing it before requesting another.
     *
     * @return a fresh JavaSparkContext built from {@link #sparkconf()}
     */
    public JavaSparkContext javaSparkContext() {
        return new JavaSparkContext(sparkconf());
    }

    /**
     * Placeholder configurer for ${...} property resolution.
     * NOTE(review): this class is @Component, not @Configuration, and this method
     * carries no @Bean annotation, so Spring never registers this configurer —
     * declare it as a static @Bean in a @Configuration class for it to take effect.
     */
    public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
        return new PropertySourcesPlaceholderConfigurer();
    }

    /**
     * Path of the input text file for the job. Generalized: may be overridden
     * with the {@code wc.file.path} system property; falls back to the original
     * hard-coded path, so existing callers see identical behavior by default.
     *
     * @return absolute path of the input file
     */
    public String filePath() {
        return System.getProperty("wc.file.path", DEFAULT_FILE_PATH);
    }
}
### ControllerTest
package com.example.demo;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
@Controller
@RequestMapping("hello")
public class ControllerTest {

    /** Service that executes the Spark coordinate-buffer job. */
    @Autowired
    private SparkServiceTest sparkServiceTest;

    /**
     * Handles GET/POST {@code /hello/wc}: runs the Spark job and writes its
     * textual result straight into the response body.
     *
     * @return the buffer-edge coordinate string produced by the job
     */
    @RequestMapping("wc")
    @ResponseBody
    public String wordCount() {
        return sparkServiceTest.doWordCount();
    }
}
### SparkServiceTest
package com.example.demo;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import lineBuffer.LatLng;
import lineBuffer.PolylineBuffer;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import scala.Tuple2;
@Component
public class SparkServiceTest implements java.io.Serializable {

    @Autowired
    ApplicationConfiguration applicationConfiguration;

    /**
     * Reads "x,y" coordinate pairs (one per line) from the configured input
     * file, parses them into {@code LatLng} points, and computes the buffered
     * polyline edge coordinates around them.
     *
     * NOTE(review): collect() pulls the entire dataset to the driver, so this
     * only scales to inputs that fit in driver memory.
     *
     * @return the edge-coordinate string produced by
     *         {@code PolylineBuffer.getLineBufferEdgeCoords}
     */
    public String doWordCount() {
        JavaSparkContext javaSparkContext = applicationConfiguration.javaSparkContext();
        System.out.println(javaSparkContext);
        try {
            // One RDD element per line of the input file.
            JavaRDD<String> lines = javaSparkContext.textFile(applicationConfiguration.filePath());
            // Parse each "x,y" line into a LatLng coordinate.
            JavaRDD<LatLng> points = lines.map(new Function<String, LatLng>() {
                @Override
                public LatLng call(String line) throws Exception {
                    String[] parts = line.split(",");
                    return new LatLng(Double.parseDouble(parts[0]), Double.parseDouble(parts[1]));
                }
            });
            List<LatLng> coords = points.collect();
            return PolylineBuffer.getLineBufferEdgeCoords(coords, 0.1D);
        } finally {
            // FIX: close() previously ran only on success; an exception in the job
            // leaked the SparkContext, and since only one SparkContext may exist
            // per JVM, every later request would then fail to create a new one.
            javaSparkContext.close();
        }
    }
}