创建新目录 (Create a new directory in HDFS)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Creates a new directory in HDFS.
 *
 * <p>Usage: {@code CreateDir <dirPath>} — the directory path is taken from args[0];
 * parent directories are created as needed (mkdirs semantics).
 */
public class CreateDir {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: CreateDir <dirPath>");
            return;
        }
        // try-with-resources: the FileSystem handle is closed even on failure
        // (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            String dirPath = args[0];
            Path hdfsPath = new Path(dirPath);
            if (fs.mkdirs(hdfsPath)) {
                System.out.println("Directory " + dirPath + " has been created successfully!");
            } else {
                // mkdirs() returning false was silently ignored before.
                System.err.println("Failed to create directory " + dirPath);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
判断文件夹是否存在 (Check whether a directory exists)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Checks whether a directory exists in HDFS and prints the result.
 *
 * <p>Usage: {@code DirExist <dirName>}. Note: fs.exists() matches any path
 * (file or directory); it does not verify the path is actually a directory.
 */
public class DirExist {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: DirExist <dirName>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            String dirName = args[0];
            if (fs.exists(new Path(dirName))) {
                System.out.println("Directory Exists!");
            } else {
                System.out.println("Directory not Exists!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
显示一个指定文件夹下所有文件 (List all files under a given directory)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

/**
 * Prints the name of every entry directly under a given HDFS directory
 * (non-recursive; listStatus returns only the immediate children).
 *
 * <p>Usage: {@code ListFiles <dirPath>}
 */
public class ListFiles {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: ListFiles <dirPath>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path srcPath = new Path(args[0]);
            FileStatus[] stats = fs.listStatus(srcPath);
            for (Path p : FileUtil.stat2Paths(stats)) {
                System.out.println(p.getName());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
删除目录 (Delete a directory)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Deletes a directory in HDFS, recursively removing its contents.
 *
 * <p>Usage: {@code DeleteDir <dirPath>}
 */
public class DeleteDir {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: DeleteDir <dirPath>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            String dirPath = args[0];
            Path hdfsPath = new Path(dirPath);
            // second argument 'true' = recursive delete of the whole subtree
            if (fs.delete(hdfsPath, true)) {
                System.out.println("Directory " + dirPath + " has been deleted successfully!");
            } else {
                // delete() returning false (e.g. path absent) was silently ignored before.
                System.err.println("Failed to delete directory " + dirPath);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
创建新文件 (Create a new file)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Creates an empty file in HDFS.
 *
 * <p>Usage: {@code CreateFile <filePath>}
 */
public class CreateFile {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: CreateFile <filePath>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path hdfsPath = new Path(args[0]);
            // The original never closed the FSDataOutputStream returned by create(),
            // leaking the stream and leaving the file's lease open. Close it so the
            // empty file is properly finalized on the NameNode.
            fs.create(hdfsPath).close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
判断文件是否存在 (Check whether a file exists)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Checks whether a file exists in HDFS and prints the result.
 *
 * <p>Usage: {@code FileExist <fileName>}. Note: fs.exists() matches any path
 * (file or directory); it does not verify the path is a regular file.
 */
public class FileExist {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: FileExist <fileName>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            String fileName = args[0];
            if (fs.exists(new Path(fileName))) {
                System.out.println("File Exists!");
            } else {
                System.out.println("File not Exists!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
从本地拷贝文件到新创建的目录中 (Copy a file from the local filesystem into a newly created HDFS directory)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Copies a file from the local filesystem into a (newly created) HDFS directory.
 *
 * <p>Usage: {@code CopyFileToFolder <localSource> <hdfsTargetDir>}
 *
 * <p>The original used {@code FileUtil.copy(fs, src, fs, dst, true, conf)}, which
 * (a) reads the source from HDFS, not the local filesystem, contradicting this
 * section's stated purpose, and (b) passes deleteSource=true, turning the "copy"
 * into a move. copyFromLocalFile fixes both: local source, source preserved.
 */
public class CopyFileToFolder {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 2) {
            System.err.println("Usage: CopyFileToFolder <localSource> <hdfsTargetDir>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path sourcePath = new Path(args[0]);
            Path targetPath = new Path(args[1]);
            if (fs.mkdirs(targetPath)) {
                System.out.println("Directory " + targetPath + " has been created successfully!");
            }
            // delSrc=false keeps the local source; overwrite=true replaces an
            // existing target, matching FileUtil.copy's overwrite behavior.
            fs.copyFromLocalFile(false, true, sourcePath, targetPath);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
将HDFS中指定文件的内容输出到终端中 (Print the contents of a given HDFS file to the terminal — implemented by ReadFile further below)
创建新文件 (Create a new file — this section duplicates the CreateFile section above)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Creates an empty file in HDFS.
 * (This snippet is a duplicate of the earlier CreateFile section in this file.)
 *
 * <p>Usage: {@code CreateFile <filePath>}
 */
public class CreateFile {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: CreateFile <filePath>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path hdfsPath = new Path(args[0]);
            // The original never closed the FSDataOutputStream returned by create(),
            // leaking the stream and leaving the file's lease open. Close it so the
            // empty file is properly finalized on the NameNode.
            fs.create(hdfsPath).close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
写文件 (Write to a file, then append to it)
package hdfs_pro;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Writes a sentence into an HDFS file, then appends a second sentence.
 *
 * <p>Usage: {@code WriteFile <filePath>}
 */
public class WriteFile {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: WriteFile <filePath>");
            return;
        }
        // Outer try-with-resources closes the FileSystem handle (original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path srcPath = new Path(args[0]);
            // create(): overwrite=true, bufferSize=1024, replication=1, blockSize=64MB.
            // try-with-resources guarantees the stream is closed even if write() throws
            // (the original leaked it on any exception between create() and close()).
            try (FSDataOutputStream os =
                    fs.create(srcPath, true, 1024, (short) 1, (long) (1 << 26))) {
                String str = "Hello, this is a sentence that should be written into the file.\n";
                // Explicit UTF-8 instead of the platform-default charset.
                os.write(str.getBytes(StandardCharsets.UTF_8));
            }
            // NOTE(review): append() requires the cluster to support append
            // (dfs.support.append) — confirm against the target deployment.
            try (FSDataOutputStream os = fs.append(srcPath)) {
                String str =
                    "Hello, this is another sentence that should be written into the file.\n";
                os.write(str.getBytes(StandardCharsets.UTF_8));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
读文件到终端 (Read a file and print it to the terminal)
package hdfs_pro;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Prints the contents of an HDFS file to standard output, line by line.
 *
 * <p>Usage: {@code ReadFile <filePath>}
 *
 * <p>The original used the long-deprecated {@code FSDataInputStream.readLine()}
 * (inherited from DataInputStream, which mangles non-Latin-1 text) and never
 * closed the stream on exception; a BufferedReader over an explicit UTF-8
 * decoder fixes both. The unused FSDataOutputStream import is also dropped.
 */
public class ReadFile {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: ReadFile <filePath>");
            return;
        }
        // try-with-resources closes reader, underlying stream, and FileSystem
        // in all cases (the original leaked them on exception).
        try (FileSystem fs = FileSystem.get(new Configuration());
             BufferedReader reader = new BufferedReader(
                 new InputStreamReader(fs.open(new Path(args[0])), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
完成指定目录下指定类型文件的合并 (Merge files of a given type under a directory — note: the code below only LISTS the directory; the merge logic is missing)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

/**
 * Prints the name of every entry directly under a given HDFS directory.
 * (Duplicate of the earlier ListFiles section.)
 *
 * <p>NOTE(review): the section heading in this file promises merging files of a
 * given type, but this class only lists the directory — the merge logic (e.g.
 * reading each matching file and concatenating into one output stream) is
 * missing and should be supplied separately.
 *
 * <p>Usage: {@code ListFiles <dirPath>}
 */
public class ListFiles {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: ListFiles <dirPath>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path srcPath = new Path(args[0]);
            FileStatus[] stats = fs.listStatus(srcPath);
            for (Path p : FileUtil.stat2Paths(stats)) {
                System.out.println(p.getName());
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
将文件从源路径移动到目的路径 (Move a file from a source path to a destination path)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Moves (renames) a file within HDFS, refusing to overwrite an existing target.
 *
 * <p>Usage: {@code MoveFile <source> <target>}
 */
public class MoveFile {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when arguments are missing.
        if (args.length < 2) {
            System.err.println("Usage: MoveFile <source> <target>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            Path sourcePath = new Path(args[0]);
            Path targetPath = new Path(args[1]);
            if (!fs.exists(targetPath)) {
                // rename() returns false instead of throwing when it fails (e.g.
                // the source does not exist); the original ignored that result.
                if (!fs.rename(sourcePath, targetPath)) {
                    System.err.println("Failed to move " + sourcePath + " to " + targetPath);
                }
            } else {
                System.out.println("File Exists!");
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
删除文件 (Delete a file)
package hdfs_pro;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Deletes a single file in HDFS (non-recursive delete).
 *
 * <p>Usage: {@code DeleteFile <filePath>}
 */
public class DeleteFile {
    public static void main(String[] args) {
        // Guard against ArrayIndexOutOfBoundsException when no argument is given.
        if (args.length < 1) {
            System.err.println("Usage: DeleteFile <filePath>");
            return;
        }
        // try-with-resources closes the FileSystem handle (the original leaked it).
        try (FileSystem fs = FileSystem.get(new Configuration())) {
            String filePath = args[0];
            Path hdfsPath = new Path(filePath);
            // second argument 'false' = non-recursive; fails on a non-empty directory
            if (fs.delete(hdfsPath, false)) {
                System.out.println("File " + filePath + " has been deleted successfully!");
            } else {
                // delete() returning false (e.g. path absent) was silently ignored before.
                System.err.println("Failed to delete file " + filePath);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}