java spark 项目_spark应用程序如何在Java项目中运行

本文详细介绍了如何在Java项目中运行Spark应用程序。通过示例代码展示了从读取数据到统计处理的过程,强调了序列化的重要性,并提供了在Spark集群上运行Java程序的步骤,包括使用maven-assembly-plugin打包依赖和修改run-example脚本。
摘要由CSDN通过智能技术生成

spark应用程序如何在Java项目中运行

发布时间:2020-12-08 15:40:21

来源:亿速云

阅读:97

作者:Leah

这篇文章将为大家详细讲解有关spark应用程序如何在Java项目中运行,文章内容质量较高,因此小编分享给大家做个参考,希望大家阅读完这篇文章后对相关知识有一定的了解。

如下所示:

package org.shirdrn.spark.job;

import java.io.File;

import java.io.IOException;

import java.util.Arrays;

import java.util.Collections;

import java.util.Comparator;

import java.util.List;

import java.util.regex.Pattern;

import org.apache.commons.logging.Log;

import org.apache.commons.logging.LogFactory;

import org.apache.spark.api.java.JavaPairRDD;

import org.apache.spark.api.java.JavaRDD;

import org.apache.spark.api.java.JavaSparkContext;

import org.apache.spark.api.java.function.FlatMapFunction;

import org.apache.spark.api.java.function.Function2;

import org.apache.spark.api.java.function.PairFunction;

import org.shirdrn.spark.job.maxmind.Country;

import org.shirdrn.spark.job.maxmind.LookupService;

import scala.Serializable;

import scala.Tuple2;

public class IPAddressStats implements Serializable {

private static final long serialVersionUID = 8533489548835413763L;

private static final Log LOG = LogFactory.getLog(IPAddressStats.class);

private static final Pattern SPACE = Pattern.compile(" ");

private transient LookupService lookupService;

private transient final String geoIPFile;

/**
 * Creates the job with the path to a MaxMind GeoIP database file and
 * initializes the IP-to-country lookup service backed by it.
 *
 * @param geoIPFile path to the GeoIP database (.dat) file
 */
public IPAddressStats(String geoIPFile) {
    this.geoIPFile = geoIPFile;
    try {
        // Resolve the database file and build the lookup service with an
        // in-memory cache so per-IP country lookups are fast.
        File geoDb = new File(this.geoIPFile);
        LOG.info("GeoIP file: " + geoDb.getAbsolutePath());
        lookupService = new AdvancedLookupService(geoDb, LookupService.GEOIP_MEMORY_CACHE);
    } catch (IOException e) {
        // Fail fast: the job cannot produce per-country stats without the database.
        throw new RuntimeException(e);
    }
}

@SuppressWarnings("serial")

public void stat(String[] args) {

JavaSparkContext ctx = new JavaSparkContext(args[0], "IPAddressStats",

System.getenv("SPARK_HOME"), JavaSparkContext.jarOfClass(IPAddressStats.class));

JavaRDD lines = ctx.textFile(args[1], 1);

// splits and extracts ip address filed

JavaRDD words = lines.flatMap(new FlatMapFunction() {

@Override

public Iterable call(String s) {

// 121.205.198.92 - - [21/Feb/2014:00:00:07 +0800] "GET /archives/417.html HTTP/1.1" 200 11465 "http://shiyanjun.cn/archives/417.html/" "Mozilla/5.0 (Windows NT 5.1; rv:11.0) Gecko/20100101 Firefox/11.0"

// ip address

return Arrays.asList(SPACE.split(s)[0]);

}

});

// map

JavaPairRDD ones = words.map(new PairFunction() {

@Override

public Tuple2 call(String s) {

return new Tuple2(s, 1);

}

});

// reduce

JavaPairRDD counts = ones.reduceByKey(new Function2() {

@Override

public Integer call(Integer i1, Integer i2) {

return i1 + i2;

}

});

List> output = counts.collect();

// sort statistics result by value

Collections.sort(output, new Comparator>() {

@Override

public int compare(Tuple2 t1, Tuple2 t2) {

if(t1._2 < t2._2) {

return 1;

} else if(t1._2 > t2._2) {

return -1;

}

return 0;

}

});

writeTo(args, output);

}

private void writeTo(String[] args, List> output) {

for (Tuple2 tuple : output) {

Country country = lookupService.getCountry((String) tuple._1);

LOG.info("[" + country.getCode() + "] " + tuple._1 + "\t" + tuple._2);

}

}

public static void main(String[] args) {

// ./bin/run-my-java-example org.shirdrn.spark.job.IPAddressStats spark://m1:7077 hdfs://m1:9000/user/shirdrn/wwwlog20140222.log /home/shirdrn/cloud/programs/spark-0.9.0-incubating-bin-hadoop1/java-examples/GeoIP_DATABASE.dat

if (args.length < 3) {

System.err.println("Usage: IPAddressStats &#

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值