import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * Demo that downloads the portion of an HDFS file starting at the second
 * 128&nbsp;MB block to the local filesystem.
 *
 * <p>It opens {@code /eclipse_hadoop.zip} on the cluster, seeks past the
 * first block, and streams the remainder into a local file.
 */
public class Damo {

    /** Default HDFS block size (128 MB); seeking here lands on the second block. */
    private static final long FIRST_BLOCK_SIZE = 128L * 1024 * 1024;

    public static void main(String[] args) {
        new Damo().readSecBlockInfo();
    }

    /**
     * Reads the source file from the second HDFS block onward and writes the
     * bytes to a local file. Any {@link IOException} is caught and its stack
     * trace printed — this is a demo, so failures are reported, not rethrown.
     */
    private void readSecBlockInfo() {
        Configuration conf = new Configuration();
        // fs.defaultFS must be a full URI including the hdfs:// scheme;
        // a bare host:port is not a valid filesystem URI.
        conf.set("fs.defaultFS", "hdfs://hadoop01:9000");
        conf.set("dfs.replication", "1");
        // try-with-resources guarantees all three resources are closed even
        // if the copy fails partway (the original leaked them on error).
        try (FileSystem fileSystem = FileSystem.get(conf);
             // Input stream positioned on the remote HDFS file.
             FSDataInputStream open = fileSystem.open(new Path("/eclipse_hadoop.zip"));
             // Local destination for the downloaded bytes.
             FileOutputStream fos = new FileOutputStream("D://0321")) {
            // Skip the first block so copying starts at the second one.
            open.seek(FIRST_BLOCK_SIZE);
            // Stream everything from the current position to the local file.
            IOUtils.copyBytes(open, fos, conf);
            System.out.println("下载成功");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}