package hdfs_pro;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
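/**
 * A minimal interactive shell for HDFS operations, read one command per line
 * from standard input. Supported commands (as handled in main below):
 *   HShell -rm <file>        delete a file
 *   HShell -rm -r <dir>      delete a directory
 *   HShell -list <path>      list files under a path
 *   HShell -mv <src> <dst>   rename/move a file
 *   HShell -cp <src> <dst>   copy a file (argument form assumed to mirror -mv)
 *   exit                     quit the shell
 */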
public class HShell {
    public static void main(String[] args) {
        String choice;
        Scanner sc = new Scanner(System.in);
        // Read one command per line until the user types "exit".
        while (true) {
            choice = sc.nextLine();
            if (choice.equals("exit")) {
                System.out.println("HShell has exited");
                break;
            }
            String[] shell = choice.split(" ");
            // Commands with a "-r" flag (e.g. "HShell -rm -r <dir>") use the first
            // three tokens as the command key; all others use the first two.
            // Guard the token count so a short command cannot cause an
            // ArrayIndexOutOfBoundsException.
            if (shell.length > 2 && shell[2].equals("-r")) {
                choice = shell[0] + " " + shell[1] + " " + shell[2];
            } else if (shell.length > 1) {
                choice = shell[0] + " " + shell[1];
            } else {
                System.out.println("input error, please input again");
                continue;
            }
            // Dispatch on the normalized command, validating the exact argument
            // count before indexing into the token array.
            switch (choice) {
                case "HShell -rm":
                    if (shell.length != 3) {
                        System.out.println("input error, please input again");
                    } else {
                        DeleteFile(shell[2]);
                    }
                    break;
                case "HShell -rm -r":
                    if (shell.length != 4) {
                        System.out.println("input error, please input again");
                    } else {
                        DeleteDir(shell[3]);
                    }
                    break;
                case "HShell -list":
                    if (shell.length != 3) {
                        System.out.println("input error, please input again");
                    } else {
                        ListFile(shell[2]);
                    }
                    break;
                case "HShell -mv":
                    if (shell.length != 4) {
                        System.out.println("input error, please input again");
                    } else {
                        Rename(shell[2], shell[3]);
                    }
                    // break outside the else, so an invalid "-mv" command no longer
                    // falls through into the "-cp" case
                    break;
                case "HShell -cp":