sparkapp local debugging

import org.slf4j.LoggerFactory

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types.{DoubleType, StructField, StructType}

object Application {

  private val LOG = LoggerFactory.getLogger(Application.getClass)

  def main(args: Array[String]): Unit = {
    // Point Hadoop at a local winutils installation so Spark can start on Windows.
    System.setProperty("hadoop.home.dir", "F:\\winutils-master\\winutils-master\\hadoop-2.7.1")

    // For local debugging, override the real command-line arguments with a fixed config file.
    val debugArgs = Array("--conf-file", "KMeans.json")
    val algorithmArgs = AlgorithmArgs(debugArgs)
    println(algorithmArgs.paramsMap)
    ArgsCheck.checkArgs(algorithmArgs.paramsMap)

    val title: String = algorithmArgs.paramsMap.get("title").toString
    val alg: String = algorithmArgs.paramsMap.get("alg").toString
    println(title)
    println(alg)

    // Run Spark locally on all available cores instead of submitting to a cluster.
    val sparkConf = new SparkConf().setAppName(alg).setMaster("local[*]")
    val sc = new SparkContext(sparkConf)
    val hiveContext = new HiveContext(sc)

    // Register a single hand-written row as the temp table "kmeans" to drive the algorithm.
    val schema = StructType(Array(
      StructField("Channel", DoubleType),
      StructField("Region", DoubleType),
      StructField("Fresh", DoubleType),
      StructField("Milk", DoubleType),
      StructField("Grocery", DoubleType),
      StructField("Frozen", DoubleType),
      StructField("Detergents_Paper", DoubleType),
      StructField("Delicassen", DoubleType)))
    val rows = sc.makeRDD(Seq(Row(2.2, 3.2, 126.69, 96.56, 75.61, 21.4, 26.74, 13.38)))
    hiveContext.createDataFrame(rows, schema).registerTempTable("kmeans")
  }
}
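The snippet stops once the single test row is registered as the temp table "kmeans"; the actual clustering is presumably dispatched elsewhere based on the alg parameter read from KMeans.json, and that dispatch code is not shown here. As a rough sketch of what such a consumer might look like on Spark 1.x, the rows can be read back from the temp table, converted to MLlib vectors, and passed to KMeans.train. The cluster count and iteration limit below are placeholder values, not taken from the original post, and with only the one debug row above the result is of course trivial.

import org.apache.spark.mllib.clustering.KMeans
import org.apache.spark.mllib.linalg.Vectors

// Read the registered temp table back as feature vectors (all columns are DoubleType).
val features = hiveContext.sql("SELECT * FROM kmeans")
  .rdd
  .map(row => Vectors.dense(row.toSeq.map(_.asInstanceOf[Double]).toArray))
  .cache()

// Placeholder hyperparameters: 2 clusters, at most 20 iterations.
val model = KMeans.train(features, 2, 20)
model.clusterCenters.foreach(println)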