Spark Streaming入门 - 结果保存到hdfs文件系统中

1 先到49服务器上,用 nc -lk 9998 发送消息(代码中接收的是 9998 端口)
2 详细代码如下:
注意:保存前先用 repartition(1),不然会有很多小文件

package cn.taobao;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.spark.api.java.StorageLevels;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import scala.Tuple2;
import java.util.Arrays;

public class Save_1 {
    /**
     * Spark Streaming word count that writes every batch's result to HDFS,
     * both as plain text and as a SequenceFile.
     *
     * Input lines are received from a socket (start a sender with
     * {@code nc -lk 9998} on the source host). Each batch covers 10 seconds.
     *
     * Note: repartition(1) is applied before saving; without it every batch
     * would produce many tiny part files on HDFS.
     */
    public static void main(String[] args) throws Exception {
        // StreamingContext is the programming entry point; batch interval = 10 s.
        JavaStreamingContext ssc = new JavaStreamingContext(
                "local[*]",
                "Save_1",
                Durations.seconds(10),
                System.getenv("SPARK_HOME"),
                // Fix: the original passed Save_1.class.getClass(), which is
                // Class<Class> (java.lang.Class), so jarOfClass would look up the
                // jar containing java.lang.Class rather than this application's jar.
                JavaStreamingContext.jarOfClass(Save_1.class));

        ssc.sparkContext().setLogLevel("ERROR");

        // Receiver: consume text lines sent to host 158.158.4.49, port 9998
        // through a socket; cache serialized in memory, spilling to disk.
        JavaReceiverInputDStream<String> lines = ssc.socketTextStream(
                "158.158.4.49", 9998, StorageLevels.MEMORY_AND_DISK_SER);

        // e.g. input "aa bb cc aa" -> tokens: aa, bb, cc, aa
        JavaDStream<String> words = lines.flatMap(
                line -> Arrays.asList(line.split(" ")).iterator());

        // -> (aa,1) (bb,1) (cc,1) (aa,1)
        JavaPairDStream<String, Integer> wordOnes =
                words.mapToPair(word -> new Tuple2<>(word, 1));

        // -> (aa,2) (bb,1) (cc,1)
        JavaPairDStream<String, Integer> wordCounts =
                wordOnes.reduceByKey(Integer::sum);

        // Save each batch to HDFS in plain-text format. The pair value is the
        // tuple's toString(); the key is NullWritable so only values are written.
        wordCounts.repartition(1).mapToPair(kv -> {
            Text text = new Text();
            text.set(kv.toString());
            return new Tuple2<>(NullWritable.get(), text);
        }).saveAsHadoopFiles("hdfs://localhost:9000/dir_spark_test/", "-txt",
                NullWritable.class, Text.class, TextOutputFormat.class);

        // Save each batch to HDFS in SequenceFile format.
        wordCounts.repartition(1).mapToPair(kv -> {
            Text text = new Text();
            text.set(kv.toString());
            return new Tuple2<>(NullWritable.get(), text);
        }).saveAsHadoopFiles("hdfs://localhost:9000/dir_spark_test/", "-seq",
                NullWritable.class, Text.class, SequenceFileOutputFormat.class);

        // Explicitly start receiving data.
        ssc.start();
        try {
            // Block until the streaming computation terminates.
            ssc.awaitTermination();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            ssc.close();
        }
    }
}

效果如下


评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值