// API_01: basic HDFS FileSystem operations — config dump, mkdir/delete, upload, file/block listing
// Dump every configuration entry loaded from the default Hadoop resources.
Configuration conf = new Configuration();
Iterator<Entry<String, String>> it = conf.iterator();
while (it.hasNext()) {
    System.out.println(it.next());
}

// Connect to the HDFS NameNode as user "hadoop".
FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.248.143:9000"), conf, "hadoop");

// Create and then recursively delete a test directory.
// FIX: the original declared the local variable 'b' twice (compile error);
// the two results now have distinct, descriptive names.
boolean created = fs.mkdirs(new Path("/testMkdir"));
boolean deleted = fs.delete(new Path("/testMkdir"), true);

// Upload a local file to HDFS under a new name.
fs.copyFromLocalFile(new Path("d:/aa.txt"), new Path("/access.log.copy"));

// Recursively list every file under the root and print its block locations.
RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path("/"), true);
while (iterator.hasNext()) {
    LocatedFileStatus status = iterator.next();
    BlockLocation[] locations = status.getBlockLocations();
    for (BlockLocation location : locations) {
        System.out.println("块长度:" + location.getLength());
        System.out.println("块名称:" + Arrays.asList(location.getHosts()));
    }
}

// List the immediate children of the root directory (files and directories).
FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
for (FileStatus status : fileStatuses) {
    System.out.println(status.getPath() + ":" + status.isDirectory());
}

// FIX: release the client connection — the original leaked the FileSystem handle.
fs.close();
// API_02: stream-based HDFS read / download / upload examples (ClientTest02)
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
/**
 * Stream-based HDFS client examples: positional read, whole-file download,
 * and whole-file upload via raw streams.
 *
 * @author snow
 */
public class ClientTest02 {

    /** HDFS NameNode endpoint shared by every example method. */
    private static final String HDFS_URI = "hdfs://192.168.248.143:9000";

    /** User the client authenticates to HDFS as. */
    private static final String HDFS_USER = "hadoop";

    public static void main(String[] args) {
    }

    /**
     * Reads {@code /test02} starting at byte offset 12 and copies the
     * remainder to a local file.
     *
     * @throws IOException          on HDFS or local-file I/O failure
     * @throws InterruptedException if connecting to HDFS is interrupted
     */
    public static void testRead() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        // FIX: try-with-resources closes the FileSystem handle and both
        // streams even if the copy fails — the original leaked all of them.
        try (FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf, HDFS_USER);
             FSDataInputStream in = fs.open(new Path("/test02"));
             FileOutputStream os = new FileOutputStream("E:\\day07\\aa.txt")) {
            in.seek(12); // skip the first 12 bytes of the HDFS file
            IOUtils.copy(in, os);
        }
    }

    /**
     * Downloads {@code /test02} in full to a local file by streaming.
     *
     * @throws IOException          on HDFS or local-file I/O failure
     * @throws InterruptedException if connecting to HDFS is interrupted
     */
    public static void testDownloadByStream() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        // FIX: resources are now closed deterministically (originals leaked).
        try (FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf, HDFS_USER);
             FSDataInputStream inputStream = fs.open(new Path("/test02"));
             FileOutputStream os = new FileOutputStream("E:\\day07\\aa.txt")) {
            IOUtils.copy(inputStream, os);
        }
    }

    /**
     * Uploads the local file {@code d:/aa.txt} to {@code /test02} by streaming.
     * Overwrites the destination if it exists ({@code FileSystem.create} default).
     *
     * @throws IOException          on HDFS or local-file I/O failure
     * @throws InterruptedException if connecting to HDFS is interrupted
     */
    public static void testUploadByStream() throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        // FIX: closing the output stream also flushes/completes the HDFS
        // write; the original never closed it, risking a truncated file.
        try (FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf, HDFS_USER);
             FSDataOutputStream outputStream = fs.create(new Path("/test02"));
             FileInputStream inputStream = new FileInputStream("d:/aa.txt")) {
            IOUtils.copy(inputStream, outputStream);
        }
    }
}