package hadoop;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
* hadoop 工具类
* 上传文件-----------upload(String ip,String srcPath,String desPath)
* 下载文件-----------download(String ip,String srcPath,String desPath )
* 遍历文件-----------list(String ip,String path)
* 删除文件-----------delete(String ip,String path,boolean boStr)
* 创建文件夹----------mkdir(String ip,String path)
* 创建并写入文件-------write(String txt,String ip,String path)
* 重命名文件----------rename(String ip,String path,String oldName,String newName)
* 判断文件是否存在------exists(String ip,String pathAndName);
* 查看文件在集群中的位置--fileLoc(String ip,String filePath)
* 文件最后一次修改时间---getTime(String ip,String path)
* @author Administrator
*
*/
public class Hadoop{
// Shared HDFS handle, (re)assigned each time a method opens a connection and
// closed at the end of that method (see upload: opened at FileSystem.get,
// closed via fs.close()).
// NOTE(review): mutable static state with no synchronization — concurrent
// calls would clobber each other's FileSystem instance; a method-local
// variable (ideally in try-with-resources) would be safer. Confirm no other
// member relies on it persisting between calls before changing.
private static FileSystem fs;
public void upload(String ip,String srcPath,String desPath){
//创建一个Configuration
Configuration conf = new Configuration();
System.out.println("运行较慢,请耐心等待...");
//创建FileSystem对象
try {
fs = FileSystem.get(new URI("hdfs://"+ip+":9000"), conf,"root");
} catch (IOException | InterruptedException | URISyntaxException e) {
e.printStackTrace();
}
//把本地磁盘中的文件上传到hdfs的某个目录下
try {
fs.copyFromLocalFile(false, new Path(srcPath), new Path(desPath));
System.out.println("运行结果 : 上传成功");
//关闭
fs.close();
} catch (IllegalArgumentException | IOEx