// RDD Action operators demo

import org.apache.spark.sql.SparkSession

/**
  * Created by LE on 2017/3/22.
  */
object DemoAction {
  def main(args: Array[String]): Unit = {
    // local[*] = run Spark locally with one worker thread per logical core.
    val spark = SparkSession.builder.appName("DemoAction").master("local[*]").getOrCreate()
    val sc = spark.sparkContext

    // ---- Action operators ----

    // foreach: runs the function on every element (executed on the executors,
    // so output order across partitions is not deterministic).
    {
      val rddA = sc.parallelize(1 to 5)
      rddA.foreach(println)
      println("*" * 50)
    }

    // saveAsTextFile: writes one text part-file per partition under `path`.
    // NOTE(review): Windows-style path; the call fails if the directory
    // already exists — confirm that is acceptable for this demo.
    {
      val path = "d:/data/out/saveAsTextFile"
      val rddA = sc.parallelize(1 to 5)
      rddA.saveAsTextFile(path)
      // glom groups each partition into an Array, showing the partition layout.
      rddA.glom.foreach(a => println(a.mkString(",")))
      println("*" * 50)
    }

    // saveAsObjectFile: writes the elements as serialized Java objects.
    {
      val path = "d:/data/out/saveAsObjectFile"
      val rddA = sc.parallelize(1 to 5)
      rddA.saveAsObjectFile(path)
      println("*" * 50)
    }

    // collect: pulls every element back to the driver as a local Array.
    // (The original line was a bare `collect` without `//`, which broke compilation.)
    {
      val rddA = sc.parallelize(1 to 5)
      val out = rddA.collect
      println(out.mkString(","))
      println("*" * 50)
    }

    // collectAsMap: returns the pair RDD as a driver-local Map.
    // Duplicate keys collapse — only one value per key survives ("B1" here).
    {
      val rddA = sc.parallelize(Array(("A1" -> 1), ("B1" -> 2), ("B1" -> 3), ("E1" -> 4)))
      val out = rddA.collectAsMap
      println(out.mkString(","))
      println("*" * 50)
    }

    // reduceByKeyLocally: merges values per key and returns a driver-local Map
    // (an action, unlike reduceByKey which returns an RDD).
    {
      val rddA = sc.parallelize(Array(("A1" -> 1), ("B1" -> 2), ("B1" -> 3), ("E1" -> 4)))
      val out = rddA.reduceByKeyLocally(_ + _)
      println(out.mkString(","))
      println("*" * 50)
    }

    // lookup: returns all values for the given key.
    {
      val rddA = sc.parallelize(Array(("A1" -> 1), ("B1" -> 2), ("B1" -> 3), ("E1" -> 4)))
      val out = rddA.lookup("B1")
      println(out.mkString(","))
      println("*" * 50)
      // A1 -> (1), B1 -> (2,3), E1 -> (4)
    }

    // count: number of elements in the RDD.
    {
      val rddA = sc.parallelize(1 to 5)
      val out = rddA.count
      println(out)
      println("*" * 50)
    }

    // top(k)(ordering): k largest under the given ordering; with the ordering
    // reversed this yields the 2 smallest (equivalent to takeOrdered(2)).
    {
      val rddA = sc.parallelize(1 to 5)
      val out = rddA.top(2)(Ordering[Int].reverse)
      println(out.mkString(","))
      println("*" * 50)
    }

    // reduce: combines all elements with a binary op (must be associative/commutative).
    {
      val rddA = sc.parallelize(1 to 5)
      val out = rddA.reduce(_ + _)
      println(out)
      println("*" * 50)
    }

    // fold: like reduce but with a zero value applied per partition AND at
    // the final driver-side merge — the traces below show the "A" repeating.
    {
      val rddA = sc.parallelize(1 to 5).map(_.toString)
      val out = rddA.fold("A")(_ + "@" + _)
      println(out)
      println("*" * 50)
      // A@1
      // A@2
      // A@3
      // A@4@5
      // A@A@1@A@2@A@3@A@4@5
    }

    // aggregate: separate within-partition (seqOp) and cross-partition (combOp)
    // functions, each seeded with the zero value "A".
    {
      val rddA = sc.parallelize(1 to 5)
      val out = rddA.aggregate("A")((a, b) => a + "@" + b, (a, b) => a + "#" + b)
      println(out)
      println("*" * 50)
      // A@1
      // A@2
      // A@3
      // A@4@5
      // A#A@1#A@2#A@3#A@4@5
    }

    spark.stop()
  }
}