/**
* Created by Administrator on 2018/5/11 0011.
*/
import org.apache.spark._
import java.io.File
import scala.math.random
object randomOne {
  /**
   * Monte-Carlo estimation of Pi with Spark in local mode.
   *
   * Samples random points in the square [-1, 1] x [-1, 1] and counts how many
   * fall inside the unit circle; the ratio approximates Pi / 4.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Windows workaround: Hadoop's shell layer looks for
    // %HADOOP_HOME%\bin\winutils.exe and fails fast if it is missing.
    // Point hadoop.home.dir at the working directory and create an empty
    // stub file so the existence check passes.
    val path = new File(".").getCanonicalPath()
    System.getProperties().put("hadoop.home.dir", path)
    new File("./bin").mkdirs()
    new File("./bin/winutils.exe").createNewFile()

    val conf = new SparkConf().setAppName("spark test").setMaster("local")
    val spark = new SparkContext(conf)
    try {
      // Number of sample points, capped so the Long literal cannot overflow Int.
      val n = math.min(10000L, Int.MaxValue).toInt
      // `1 until n` yields n - 1 elements; each one contributes 1 if its
      // random point lands inside the unit circle, otherwise 0.
      val count = spark.parallelize(1 until n, 2).map { _ =>
        val x = random * 2 - 1
        val y = random * 2 - 1
        if (x * x + y * y < 1) 1 else 0
      }.reduce(_ + _)
      // Fix: divide by the actual sample count (n - 1, since the range is
      // `1 until n`), not n, and add the missing space after "roughly".
      println("pi is roughly " + 4.0 * count / (n - 1))
    } finally {
      // Ensure the SparkContext is released even if the job fails.
      spark.stop()
    }
  }
}
// NOTE: Running Spark on Windows fails without the winutils.exe workaround
// added at the top of main (original note: "spark 在 Windows 环境中运行报错，需要添加代码").
// (Blog metadata from the original source: latest recommended article published 2024-04-30 15:51:57.)