判断文件是否存在,若存在则删除该文件
package com.doit.hdp.day01;
import com.doit.hdp.Utils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class Demo03 {
    /**
     * Checks whether an HDFS path exists and, if so, deletes it recursively;
     * otherwise prints a message.
     *
     * <p>Fix over the original: the FileSystem handle was only closed on the
     * success path — if {@code exists} or {@code delete} threw, the connection
     * leaked. try-with-resources guarantees {@code fs.close()} runs.
     */
    public static void main(String[] args) throws Exception {
        try (FileSystem fs = Utils.getHdfsFs()) {
            Path path = new Path("/wnn");
            if (fs.exists(path)) {
                // second argument true = recursive delete (handles directories too)
                fs.delete(path, true);
            } else {
                System.out.println("没有该文件无法删除");
            }
        }
    }
}
package com.doit.hdp.day01;
import com.doit.hdp.Utils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class YiDong {
    /**
     * Demonstrates moving and renaming on HDFS via {@link FileSystem#rename}.
     *
     * <p>Fixes over the original: {@code rename} reports failure by returning
     * {@code false} rather than throwing, and the original discarded that
     * result, so a failed move/rename passed silently; the FileSystem handle
     * also leaked if an operation threw. Results are now checked and the
     * handle is closed via try-with-resources.
     */
    public static void main(String[] args) throws Exception {
        try (FileSystem fs = Utils.getHdfsFs()) {
            // Move a file into an existing directory
            boolean moved = fs.rename(new Path("/a.txt"), new Path("/user/a.txt"));
            // Rename a directory
            boolean renamed = fs.rename(new Path("/user"), new Path("/USER"));
            if (!moved || !renamed) {
                System.out.println("rename failed: moved=" + moved + ", renamed=" + renamed);
            }
        }
    }
}
查看命令如下:
[root@linux01 /]# hdfs dfs -ls /
Found 2 items
-rw-r--r-- 3 root supergroup 0 2021-04-26 10:47 /jar.txt
drwxr-xr-x - root supergroup 0 2021-04-26 20:16 /user
[root@linux01 /]# hdfs dfs -ls /user
Found 2 items
drwxr-xr-x - root supergroup 0 2021-04-26 20:09 /user/a.txt
drwxr-xr-x - root supergroup 0 2021-04-25 20:54 /user/root
[root@linux01 /]# hdfs dfs -ls /
Found 2 items
drwxr-xr-x - root supergroup 0 2021-04-26 20:16 /USER
-rw-r--r-- 3 root supergroup 0 2021-04-26 10:47 /jar.txt
package com.doit.hdp.day01;
import com.doit.hdp.Utils;
import org.apache.hadoop.fs.*;
import java.sql.Array;
import java.util.Arrays;
public class BianLi {
    /**
     * Lists files (not directories) directly under "/" and prints, for each
     * physical block of each file, its length and the hosts holding a replica.
     *
     * <p>Fixes over the original: the comment claimed a recursive walk, but
     * {@code listFiles(path, false)} is explicitly non-recursive — only files
     * directly under the given path are returned. Unused metadata calls
     * ({@code getBlockSize}/{@code getReplication}/{@code getLen}) whose
     * results were discarded are removed, and the FileSystem handle is closed
     * even if iteration throws.
     */
    public static void main(String[] args) throws Exception {
        try (FileSystem fs = Utils.getHdfsFs()) {
            // false = non-recursive: files directly under "/" only.
            // Pass true to descend into subdirectories.
            RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/"), false);
            while (files.hasNext()) {
                LocatedFileStatus file = files.next();
                Path path = file.getPath();
                // Physical block locations: one entry per HDFS block,
                // each with its length and the datanodes holding a replica.
                for (BlockLocation block : file.getBlockLocations()) {
                    long length = block.getLength();
                    String[] hosts = block.getHosts();
                    // Arrays.asList gives a readable [host1, host2, ...] rendering
                    System.out.println(path + "....." + length + ",," + Arrays.asList(hosts));
                }
            }
        }
        // fs.listStatus(path) would list files AND directories (non-recursive)
    }
}
package com.doit.hdp.day01;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.net.URI;
public class Demo05 {
    /**
     * Uploads a local file to HDFS with a custom replication factor.
     *
     * <p>Fix over the original: the FileSystem handle was only closed on the
     * success path; try-with-resources closes it even if the upload throws.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Override the cluster-default replication for files written
        // through this FileSystem instance
        conf.set("dfs.replication", "2");
        // Connect to the namenode as user "root"
        try (FileSystem fs =
                FileSystem.newInstance(new URI("hdfs://linux01:8020"), conf, "root")) {
            fs.copyFromLocalFile(new Path("d://text.rar"), new Path("/tt.rar"));
        }
    }
}
package com.doit.hdp.day01;
import com.doit.hdp.Utils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class Demo06 {
    /**
     * Prints every entry (files and directories) directly under the HDFS
     * root: first the full path, then just its final path component.
     */
    public static void main(String[] args) throws Exception {
        FileSystem fs = Utils.getHdfsFs();
        // listStatus is non-recursive and returns both files and directories
        for (FileStatus status : fs.listStatus(new Path("/"))) {
            Path entry = status.getPath();
            System.out.println(entry);
            System.out.println(entry.getName());
        }
        fs.close();
    }
}
package com.doit.hdp.day01;
import com.doit.hdp.Utils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.BufferedReader;
import java.io.InputStreamReader;
public class Demo07 {
    /**
     * Opens /m.txt on HDFS, positions the stream 20 bytes in, and prints the
     * remainder of the line found at that offset.
     *
     * <p>Fixes over the original: all three resources were closed only on the
     * success path and leaked on any read failure — try-with-resources closes
     * them in reverse order unconditionally. {@code InputStream.skip(20)} is
     * also allowed to skip fewer than 20 bytes; {@code FSDataInputStream.seek}
     * positions to the exact byte offset.
     */
    public static void main(String[] args) throws Exception {
        try (FileSystem fs = Utils.getHdfsFs();
             FSDataInputStream is = fs.open(new Path("/m.txt"))) {
            // seek() guarantees the exact offset, unlike skip()
            is.seek(20);
            // NOTE(review): InputStreamReader uses the platform default
            // charset here, as the original did — confirm the file encoding.
            try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
                System.out.println(br.readLine());
            }
        }
    }
}