1、创建工程
点击project——Maven——next
![712932c6de00d3db382a035fb4acfdc6.png](https://i-blog.csdnimg.cn/blog_migrate/753ee1cb3785cf13ad49f365b68ad727.jpeg)
2、通过maven导入项目依赖jar包
(1)设置maven自动导入依赖jar包
![8838f717574b736ac3725085e0081608.png](https://i-blog.csdnimg.cn/blog_migrate/a5f602cb4ea4a3d31d7451c90c140c75.jpeg)
![45a9c14a1d3ba4c14b79c1c4efa4c4fb.png](https://i-blog.csdnimg.cn/blog_migrate/ddcffb1bd1aa4b1f34d0be27038496dc.jpeg)
勾选 Import Maven projects automatically,点击apply
(2)配置pom.xml文件
![a14bf40af4f6b5f0d7be588b1bcc736f.png](https://i-blog.csdnimg.cn/blog_migrate/f5c6c0ee5291839d262789515472734a.jpeg)
pom.xml配置文件如下:
```xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.kaikeba.hadoop</groupId>
    <artifactId>com.kaikeba.hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>jar</packaging>

    <properties>
        <hadoop.version>2.7.3</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>commons-cli</groupId>
            <artifactId>commons-cli</artifactId>
            <version>1.2</version>
        </dependency>
        <dependency>
            <groupId>commons-logging</groupId>
            <artifactId>commons-logging</artifactId>
            <version>1.1.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.7.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-app</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-hs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
    </dependencies>
</project>
```
3、编写HDFS读写程序
****把本地文件传输到HDFS****package com.kaikeba.hadoop.hdfs;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.IOUtils;import java.io.*;import java.net.URI;/** * 将本地文件系统的文件通过java-API写入到HDFS文件 */public class FileCopyFromLocal { public static void main(String[] args) { String source = "E:aa.mp4"; //获取/data的m目录存在(根据自己的环境更改) String destination = "hdfs://122.51.241.109:9000/data/hdfs01.mp4"; InputStream in = null; try { in = new BufferedInputStream(new FileInputStream(source)); //HDFS读写的配置文件 Configuration conf = new Configuration(); //生成一个文件系统对象 FileSystem fs = FileSystem.get(URI.create(destination),conf); //生成一个输出流 OutputStream out = fs.create(new Path(destination)); IOUtils.copyBytes(in, out, 4096, true); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }}
**把HDFS中的文件传输到本地**package com.kaikeba.hadoop.hdfs;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.FSDataInputStream;import org.apache.hadoop.fs.FileSystem;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.IOUtils;import java.io.BufferedOutputStream;import java.io.FileOutputStream;import java.io.IOException;import java.net.URI;/** * 从HDFS读取文件 * 打包运行jar包 [bruce@node-01 Desktop]$ hadoop jar com.kaikeba.hadoop-1.0-SNAPSHOT.jar com.kaikeba.hadoop.hdfs.FileReadFromHdfs */public class FileReadFromHdfs { public static void main(String[] args) { try { // String srcFile = "hdfs://122.51.241.109:9000/data/hdfs01.mp4"; Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(URI.create(srcFile),conf); FSDataInputStream hdfsInStream = fs.open(new Path(srcFile)); BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream("/opt/hdfs01.mp4")); IOUtils.copyBytes(hdfsInStream, outputStream, 4096, true); } catch (IOException e) { e.printStackTrace(); } }}
4、通过运行jar包的方式验证
双击package
![4c05759b362d23d46009ee68d6fe63f2.png](https://i-blog.csdnimg.cn/blog_migrate/20e462a8d4f6891bda75c79408f26fe2.jpeg)
![442c590c286189d0bd3589e771bba3bc.png](https://i-blog.csdnimg.cn/blog_migrate/8e619c573affedacc3c122ea2266d9f3.jpeg)
生成com.kaikeba.hadoop-1.0-SNAPSHOT.jar,拷贝到服务器中执行
执行命令:hadoop jar com.kaikeba.hadoop-1.0-SNAPSHOT.jar com.kaikeba.hadoop.hdfs.FileReadFromHdfs
注意: com.kaikeba.hadoop.hdfs.FileReadFromHdfs是全类名,根据自己的项目更改。
![f101452950d6e5ae30473810585be28c.png](https://i-blog.csdnimg.cn/blog_migrate/706f050d763d215dac7229e9e5ad2930.jpeg)