线缓冲区(line buffer)计算示例:Spring Boot + Spark


ApplicationConfiguration

package com.example.demo;
import java.io.Serializable;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.stereotype.Component;

@Component
public class ApplicationConfiguration implements Serializable{

    private static final long serialVersionUID = 1L;

    /**
     * Builds the Spark configuration for the local line-buffer demo.
     *
     * <p>Runs in local mode on all cores, raises the testing-memory floor so
     * Spark starts inside a small JVM, and switches to Kryo serialization.
     *
     * @return a fully configured {@link SparkConf}
     */
    public SparkConf sparkconf(){
        SparkConf conf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("wc")
                .set("spark.testing.memory", "2147480000")
                .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        return conf;
    }

    /**
     * Creates a new {@link JavaSparkContext} from {@link #sparkconf()}.
     *
     * <p>NOTE(review): Spark allows only one active context per JVM, so the
     * caller is responsible for closing the returned context before this
     * method is invoked again (the current caller does close it).
     *
     * @return a freshly created Spark context
     */
    public JavaSparkContext javaSparkContext(){
        return new JavaSparkContext(sparkconf());
    }

    /**
     * Resolves {@code ${...}} placeholders in {@code @Value} annotations.
     *
     * <p>NOTE(review): without a {@code @Bean} annotation (and this class
     * being {@code @Configuration}) Spring will not pick this up; verify the
     * application actually relies on it.
     */
    public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
        return new PropertySourcesPlaceholderConfigurer();
    }

    /**
     * Path of the input file containing one "lat,lng" pair per line.
     *
     * <p>Generalized: may be overridden with the {@code wordcount.input}
     * system property; falls back to the original hard-coded default so
     * existing behavior is unchanged.
     *
     * @return the input file path
     */
    public String filePath(){
        return System.getProperty("wordcount.input",
                "C:\\Users\\wen\\Desktop\\javawb\\aa.txt");
    }
}

ControllerTest

package com.example.demo;
import java.util.ArrayList;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;



@Controller
@RequestMapping("hello")
public class ControllerTest {

    /** Service that runs the Spark line-buffer job. */
    @Autowired
    private SparkServiceTest sparkServiceTest;

    /**
     * Handles {@code /hello/wc}: runs the line-buffer computation and writes
     * the resulting coordinate string straight into the response body.
     *
     * @return the buffer-edge coordinates produced by the Spark job
     */
    @RequestMapping("wc")
    @ResponseBody
    public String wordCount(){
        return sparkServiceTest.doWordCount();
    }
}

SparkServiceTest

package com.example.demo;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import lineBuffer.LatLng;
import lineBuffer.PolylineBuffer;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import scala.Tuple2;
@Component
public class SparkServiceTest implements java.io.Serializable{

    private static final long serialVersionUID = 1L;

    @Autowired
    ApplicationConfiguration applicationConfiguration;

    /**
     * Reads "lat,lng" pairs from the configured input file, parses them into
     * {@link LatLng} points with Spark, and returns the buffer-edge
     * coordinates of the resulting polyline.
     *
     * @return coordinate string produced by
     *         {@code PolylineBuffer.getLineBufferEdgeCoords}
     * @throws IllegalArgumentException if a non-blank line does not contain
     *         two comma-separated values
     */
    public String doWordCount(){
        JavaSparkContext javaSparkContext = applicationConfiguration.javaSparkContext();
        System.out.println(javaSparkContext);
        try {
            // One RDD element per input line.
            JavaRDD<String> file = javaSparkContext.textFile(applicationConfiguration.filePath());

            // Parse each "lat,lng" line into a LatLng. Blank lines are skipped
            // so a trailing newline in the file no longer aborts the job with
            // an opaque ArrayIndexOutOfBoundsException.
            JavaRDD<LatLng> latln = file
                    .filter(new Function<String, Boolean>() {
                        @Override
                        public Boolean call(String line) throws Exception {
                            return line != null && !line.trim().isEmpty();
                        }
                    })
                    .map(new Function<String, LatLng>() {
                        @Override
                        public LatLng call(String line) throws Exception {
                            String[] parts = line.split(",");
                            if (parts.length < 2) {
                                throw new IllegalArgumentException(
                                        "Expected 'lat,lng' but got: " + line);
                            }
                            return new LatLng(Double.parseDouble(parts[0].trim()),
                                    Double.parseDouble(parts[1].trim()));
                        }
                    });

            List<LatLng> collect = latln.collect();
            return PolylineBuffer.getLineBufferEdgeCoords(collect, 0.1D);
        } finally {
            // Release the context even when reading/parsing fails; the
            // original leaked the context on any exception before close().
            javaSparkContext.close();
        }
    }
}

 

 

 

 

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值