package sy3
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
/** Entry point: counts the number of lines in a text file using Spark.
  *
  * Reads `logFile` as an RDD with 2 partitions, counts its lines, and
  * prints the result. The file path is currently hard-coded to a local
  * workspace file; the commented-out alternative points at HDFS.
  */
object tjhs {
  def main(args: Array[String]): Unit = { // was deprecated procedure syntax; explicit `: Unit =` is required in modern Scala
    val logFile = "E:/IntelliJ IDEA 2019.3.3/WorkSpace/MyScala/src/main/scala/sy3/test1"
    //val logFile = "hdfs://192.168.249.128:9000/mapreduce/input1/in1.txt"
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    try {
      // textFile is lazy; count() triggers the actual read. minPartitions = 2.
      val logData = sc.textFile(logFile, 2)
      val num = logData.count()
      println("这个文件有 %d 行!".format(num))
    } finally {
      // Always release the SparkContext, even if the job throws,
      // so executors and the driver UI shut down cleanly.
      sc.stop()
    }
  }
}
// Spark读取文件统计行数 (Spark: read a file and count its lines)
// 最新推荐文章于 2024-05-18 15:23:51 发布 (blog-page residue from the original source; kept as a comment so the file compiles)