pom文件
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.example</groupId>
<artifactId>Spark01</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.12</artifactId>
<version>3.5.2</version>
</dependency>
</dependencies>
</project>
代码示例:
package com.care.Spark01;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import java.util.Arrays;
import java.util.List;
public class Spark_RDD_Disk {
    /**
     * Reads a local text file into a Spark RDD and prints every line to stdout.
     *
     * @param args optional; args[0] may supply the input file path. When absent,
     *             the original hard-coded default path is used, so existing
     *             invocations behave exactly as before.
     */
    public static void main(String[] args) {
        final SparkConf conf = new SparkConf();
        conf.setMaster("local");
        conf.setAppName("spark");
        // Generalized: allow the input path to be passed on the command line,
        // falling back to the original hard-coded location.
        final String inputPath = args.length > 0
                ? args[0]
                : "C:\\Users\\74794\\IdeaProjects\\Spark01\\data\\test.txt";
        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is released even if collect() throws (the original leaked it
        // on any exception before jsc.close()).
        try (final JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // TODO build the RDD data-processing model.
            // Use the context to connect to the file data source and build the RDD.
            final JavaRDD<String> rdd = jsc.textFile(inputPath);
            // collect() pulls all partitions to the driver — fine for a small
            // local test file, not for large datasets.
            final List<String> collect = rdd.collect();
            collect.forEach(System.out::println);
        }
    }
}
data/test.txt:项目 data 目录下的本地文件数据(示例输入)
运行结果: