submit SparkKMeans

原创 2016年08月28日 23:15:24
spark-submit --class "org.apache.spark.examples.SparkKMeans" --master local[4] /opt/spark/spark-1.6.1-bin-hadoop1/lib/spark-examples-1.6.1-hadoop1.2.1.jar /home/sc/Desktop/data.txt 2 0.0001




1、mkdir ./kmeans
2、mkdir -p ./kmeans/src/main/scala
3、mv SparkKMeans.scala ./kmeans/src/main/scala
4、touch Spark.sbt(注意:必须在 Spark.sbt 中声明 scalaVersion 以及 spark-core 和 breeze 的 libraryDependencies,否则编译时会报下面日志中的 not found: object breeze 错误)
5、sbt package


root@scala kmeans]# /opt/sbt/bin/sbt package
[info] Set current project to SparkKMeans Project (in build file:/home/sc/Desktop/kmeans/)
[warn] The `-` command is deprecated in favor of `onFailure` and will be removed in 0.14.0
[info] Updating {file:/home/sc/Desktop/kmeans/}kmeans...
[info] Resolving com.sun.jersey.jersey-test-framework#jersey-test-framework-griz[info] Resolving com.fasterxml.jackson.module#jackson-module-scala_2.10;2.4.4 ..[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
[info] Compiling 1 Scala source to /home/sc/Desktop/kmeans/target/scala-2.10/classes...
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:21: not found: object breeze *********************************************************************
[error] import breeze.linalg.{Vector, DenseVector, squaredDistance}
[error]        ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:35: not found: value DenseVector******************************************************************
[error]     DenseVector(line.split(' ').map(_.toDouble))
[error]     ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:43: not found: value squaredDistance***********************************************************
[error]       val tempDist = squaredDistance(p, centers(i))
[error]                      ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:83: type mismatch;
[error]  found   : Vector[Double]************************************************
[error]  required: String
[error]       val pointStats = closest.reduceByKey{case ((p1, c1), (p2, c2)) => (p1 + p2, c1 + c2)}
[error]                                                                               ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:90: not found: value squaredDistance*****************************************************
[error]         tempDist += squaredDistance(kPoints(i), newPoints(i))
[error]                     ^
[error] 5 errors found
[error] (compile:compileIncremental) Compilation failed
[error] Total time: 137 s, completed Aug 4, 2016 12:13:19 AM
[error] Not a valid command: sbt-create
[error] Not a valid key: sbt-create (similar: state, sbt-resolver, streams)
[error] sbt-create
[error]           ^
[info] Set current project to SparkKMeans Project (in build file:/home/sc/Desktop/kmeans/)
[info] Compiling 1 Scala source to /home/sc/Desktop/kmeans/target/scala-2.10/classes...
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:21: not found: object breeze
[error] import breeze.linalg.{Vector, DenseVector, squaredDistance}
[error]        ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:35: not found: value DenseVector
[error]     DenseVector(line.split(' ').map(_.toDouble))
[error]     ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:43: not found: value squaredDistance
[error]       val tempDist = squaredDistance(p, centers(i))
[error]                      ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:83: type mismatch;
[error]  found   : Vector[Double]
[error]  required: String
[error]       val pointStats = closest.reduceByKey{case ((p1, c1), (p2, c2)) => (p1 + p2, c1 + c2)}
[error]                                                                               ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:90: not found: value squaredDistance
[error]         tempDist += squaredDistance(kPoints(i), newPoints(i))
[error]                     ^
[error] 5 errors found
[error] (compile:compileIncremental) Compilation failed
[error] Total time: 361 s, completed Aug 4, 2016 12:19:58 AM
[root@scala kmeans]# ls
project  Spark.sbt  Spark.sbt~  src  target
[root@scala kmeans]# cd target
[root@scala target]# ls
resolution-cache  scala-2.10  streams
[root@scala target]# cd scala-2.10
[root@scala scala-2.10]# ls
classes
[root@scala scala-2.10]# cd classes
[root@scala classes]# ls
[root@scala classes]# cd ..
[root@scala scala-2.10]# cd ..
[root@scala target]# cd ..
[root@scala kmeans]# ls
project  Spark.sbt  Spark.sbt~  src  target
[root@scala kmeans]# cd ..
[root@scala Desktop]# cd kmeans
[root@scala kmeans]# ls
project  Spark.sbt  Spark.sbt~  src  target
[root@scala kmeans]# rm -rf project
[root@scala kmeans]# rm -rf target
[root@scala kmeans]# ls
Spark.sbt  Spark.sbt~  src
[root@scala kmeans]# rm -rf /src/main/scala/SparkKMeans.scala
[root@scala kmeans]# ls
Spark.sbt  Spark.sbt~  src
[root@scala kmeans]# /opt/sbt/bin/sbt package
[info] Set current project to SparkKMeans Project (in build file:/home/sc/Desktop/kmeans/)
[warn] The `-` command is deprecated in favor of `onFailure` and will be removed in 0.14.0
[info] Updating {file:/home/sc/Desktop/kmeans/}kmeans...
[info] Resolving com.sun.jersey.jersey-test-framework#jersey-test-framework-gr[info] Resolving com.fasterxml.jackson.module#jackson-module-scala_2.10;2.4.4 [info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
[info] Compiling 1 Scala source to /home/sc/Desktop/kmeans/target/scala-2.10/classes...
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:4: not found: object breeze
[error] import breeze.linalg.{Vector, DenseVector, squaredDistance}
[error]        ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:18: not found: value DenseVector
[error]     DenseVector(line.split(' ').map(_.toDouble))
[error]     ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:26: not found: value squaredDistance
[error]       val tempDist = squaredDistance(p, centers(i))
[error]                      ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:66: type mismatch;
[error]  found   : Vector[Double]
[error]  required: String
[error]       val pointStats = closest.reduceByKey{case ((p1, c1), (p2, c2)) => (p1 + p2, c1 + c2)}
[error]                                                                               ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:73: not found: value squaredDistance
[error]         tempDist += squaredDistance(kPoints(i), newPoints(i))
[error]                     ^
[error] 5 errors found
[error] (compile:compileIncremental) Compilation failed
[error] Total time: 119 s, completed Aug 4, 2016 2:10:30 AM
[error] Not a valid command: sbt-create
[error] Not a valid key: sbt-create (similar: state, sbt-resolver, streams)
[error] sbt-create
[error]           ^
[info] Set current project to SparkKMeans Project (in build file:/home/sc/Desktop/kmeans/)
[info] Compiling 1 Scala source to /home/sc/Desktop/kmeans/target/scala-2.10/classes...
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:4: not found: object breeze
[error] import breeze.linalg.{Vector, DenseVector, squaredDistance}
[error]        ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:18: not found: value DenseVector
[error]     DenseVector(line.split(' ').map(_.toDouble))
[error]     ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:26: not found: value squaredDistance
[error]       val tempDist = squaredDistance(p, centers(i))
[error]                      ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:66: type mismatch;
[error]  found   : Vector[Double]
[error]  required: String
[error]       val pointStats = closest.reduceByKey{case ((p1, c1), (p2, c2)) => (p1 + p2, c1 + c2)}
[error]                                                                               ^
[error] /home/sc/Desktop/kmeans/src/main/scala/SparkKMeans.scala:73: not found: value squaredDistance
[error]         tempDist += squaredDistance(kPoints(i), newPoints(i))
[error]                     ^
[error] 5 errors found
[error] (compile:compileIncremental) Compilation failed
[error] Total time: 49 s, completed Aug 4, 2016 2:11:37 AM
[root@scala kmeans]# 




// scalastyle:off println
package org.apache.spark.examples


import breeze.linalg.{Vector, DenseVector, squaredDistance}


import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._


/**
 * K-means clustering.
 *
 * This is an example implementation for learning how to use Spark. For more conventional use,
 * please refer to org.apache.spark.mllib.clustering.KMeans
 */
object SparkKMeans {

  /**
   * Parses one line of space-separated numbers into a dense breeze vector,
   * e.g. "1.0 2.0 3.0" -> DenseVector(1.0, 2.0, 3.0).
   *
   * @param line a line of the input file; every token must parse as a Double
   * @return the point as a breeze Vector[Double]
   */
  def parseVector(line: String): Vector[Double] = {
    DenseVector(line.split(' ').map(_.toDouble))
  }

  /**
   * Returns the index of the center in `centers` nearest to `p` under
   * squared Euclidean distance. Ties are broken in favor of the lower
   * index. Assumes `centers` is non-empty (index 0 is returned otherwise).
   *
   * @param p       the query point
   * @param centers the current cluster centers
   * @return index into `centers` of the closest center
   */
  def closestPoint(p: Vector[Double], centers: Array[Vector[Double]]): Int = {
    var bestIndex = 0
    var closest = Double.PositiveInfinity

    for (i <- 0 until centers.length) {
      val tempDist = squaredDistance(p, centers(i))
      if (tempDist < closest) {
        closest = tempDist
        bestIndex = i
      }
    }

    bestIndex
  }

  /** Prints a warning that this is a teaching example, not production KMeans. */
  def showWarning() {
    System.err.println(
      """WARN: This is a naive implementation of KMeans Clustering and is given as an example!
        |Please use the KMeans method found in org.apache.spark.mllib.clustering
        |for more conventional use.
      """.stripMargin)
  }

  /**
   * Entry point. Usage: SparkKMeans <file> <k> <convergeDist>
   *
   * Runs Lloyd's algorithm: repeatedly assigns every point to its nearest
   * center and moves each center to the mean of its assigned points, until
   * the total squared movement of the centers drops below `convergeDist`.
   */
  def main(args: Array[String]) {

    if (args.length < 3) {
      System.err.println("Usage: SparkKMeans <file> <k> <convergeDist>")
      System.exit(1)
    }

    showWarning()

    val sparkConf = new SparkConf().setAppName("SparkKMeans")
    val sc = new SparkContext(sparkConf)
    val lines = sc.textFile(args(0))
    val data = lines.map(parseVector _).cache()
    val K = args(1).toInt
    val convergeDist = args(2).toDouble

    // Initial centers: K distinct random points; fixed seed 42 makes runs reproducible.
    val kPoints = data.takeSample(withReplacement = false, K, 42).toArray
    var tempDist = 1.0

    while (tempDist > convergeDist) {
      // Pair each point with (index of nearest center, (point, 1)).
      val closest = data.map(p => (closestPoint(p, kPoints), (p, 1)))

      // Per-cluster running (sum of points, number of points).
      val pointStats = closest.reduceByKey { case ((p1, c1), (p2, c2)) => (p1 + p2, c1 + c2) }

      // New center for each non-empty cluster = mean of its points.
      val newPoints = pointStats.map { pair =>
        (pair._1, pair._2._1 * (1.0 / pair._2._2))
      }.collectAsMap()

      // Total squared movement of the centers in this iteration.
      // BUG FIX: a cluster that received no points has no entry in newPoints,
      // so the original newPoints(i) threw NoSuchElementException. Treat an
      // empty cluster's center as unmoved (contributes 0 to the delta).
      tempDist = 0.0
      for (i <- 0 until K) {
        tempDist += squaredDistance(kPoints(i), newPoints.getOrElse(i, kPoints(i)))
      }

      // Move only the centers that actually received points.
      for (newP <- newPoints) {
        kPoints(newP._1) = newP._2
      }
      println("Finished iteration (delta = " + tempDist + ")")
    }

    println("Final centers:")
    kPoints.foreach(println)
    sc.stop()
  }
}
// scalastyle:on println
版权声明:本文为博主原创文章,未经博主允许不得转载。

相关文章推荐

submit和button的区别

  • 2013-08-08 09:38
  • 49KB
  • 下载

Oracle EBS fnd_request.submit_request 与 Fnd_concurrent.wait_for_ruqest

1、关于fnd_request.submit_request的用法 fnd_request.submit_request的用法: FND_REQUEST.SUBMIT_REQUEST 函数是用来提交一...

html中submit和button的区别

  • 2014-01-20 17:05
  • 34KB
  • 下载

oracle 异步io与oracle 11g等待事件:db file async I/O submit

SQL> select * from v$version; BANNER ---------------------------------------------------------------...

表单提交中的input、button、submit的区别

最近项目中用了很多的表单提交,发现input、button、submit甚至回车键都可以引发表单提交,下面将分别验证他们在使用中的区别。 1.input[type=submit] 我们直接...
内容举报
返回顶部
收藏助手
不良信息举报
您举报文章:深度学习:神经网络中的前向传播和反向传播算法推导
举报原因:
原因补充:

(最多只允许输入30个字)