// Task 2: Create a text file of roughly 100 bytes on the local file system, then write a
// program that reads that file and stores its bytes 101-120 as a new file in HDFS.
import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
* @author feixu
*/
/**
 * Streams the contents of a Hadoop-accessible file to standard output,
 * much like the Unix {@code cat} command.
 *
 * @author feixu
 */
public class FileSystemCat {
/**
 * Prints the file named by the first command-line argument to stdout.
 *
 * @param args args[0] is the URI of the file to display
 * @throws Exception if the file cannot be opened or read
 */
public static void main(String[] args) throws Exception {
String location = args[0];
FileSystem fileSystem = FileSystem.get(URI.create(location), new Configuration());
InputStream stream = null;
try {
stream = fileSystem.open(new Path(location));
// Copy with a 4 KB buffer; 'false' keeps System.out open afterwards.
IOUtils.copyBytes(stream, System.out, 4096, false);
} finally {
// closeStream tolerates null and swallows close() errors.
IOUtils.closeStream(stream);
}
}
}
// Task 3: The reverse operation - create a text file of roughly 100 bytes in HDFS, then write a
// program that reads that file and stores its bytes 101-120 as a new file on the local file system.
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
* @author feixu
* @see FSDataInputStream
* @see FSDataOutputStream
*/
/**
 * Copies bytes 101-120 (1-based) of a local file into a new HDFS file.
 *
 * @author feixu
 * @see FSDataInputStream
 * @see FSDataOutputStream
 */
public class WriteToHdfs {
/**
 * Reads bytes 101-120 from the local file and writes them to HDFS.
 *
 * @param args args[0] is the local file path, args[1] is the HDFS destination URI
 * @throws Exception if reading the source or writing the destination fails
 */
public static void main(String[] args) throws Exception{
//hdfs uri
String dst = args[1];
//get hdfs file system instance
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FSDataOutputStream out = null;
try{
//open hdfs output stream
out = fs.create(new Path(dst));
//write exactly the bytes that were read (the array is already trimmed)
out.write(readFromLocal(args));
}finally{
IOUtils.closeStream(out);
}
}
/**
 * Reads bytes 101-120 (1-based) from the local file.
 * Byte 101 lives at zero-based offset 100 - the original code started at
 * offset 101 (off by one) and returned a 1024-byte zero-padded buffer,
 * which wrote ~1000 junk bytes to the destination.
 *
 * @param args args[0] is the local file path
 * @return the bytes actually read, at most 20; empty if the file has fewer than 101 bytes
 * @throws Exception if the file cannot be opened or read
 */
private static byte[] readFromLocal(String[] args) throws Exception{
//local disk uri
String uri = args[0];
//get local disk file system instance
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
FSDataInputStream in = null;
try{
//open local disk input stream
in = fs.open(new Path(uri));
//positioned read of up to 20 bytes starting at zero-based offset 100
byte[] buffer = new byte[20];
int bytesRead = in.read(100L, buffer, 0, buffer.length);
if (bytesRead < 0) {
//file shorter than 101 bytes: nothing in the requested range
bytesRead = 0;
}
//trim to the byte count actually read so the caller writes no padding
byte[] result = new byte[bytesRead];
System.arraycopy(buffer, 0, result, 0, bytesRead);
return result;
}finally{
IOUtils.closeStream(in);
}
}
}
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
* @author feixu
* @see FSDataInputStream
* @see FSDataOutputStream
*
*/
/**
 * Copies bytes 101-120 (1-based) of an HDFS file into a new local file.
 *
 * @author feixu
 * @see FSDataInputStream
 * @see FSDataOutputStream
 */
public class WriteToLocal {
/**
 * Reads bytes 101-120 from the HDFS file and writes them to the local disk.
 *
 * @param args args[0] is the HDFS source URI, args[1] is the local destination path
 * @throws Exception if reading the source or writing the destination fails
 */
public static void main(String[] args) throws Exception{
//local disk uri
String dst = args[1];
//get local file system instance
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
FSDataOutputStream out = null;
try{
//open local file's output stream
out = fs.create(new Path(dst));
//write exactly the bytes that were read (the array is already trimmed)
out.write(readFromHdfs(args));
}finally{
IOUtils.closeStream(out);
}
}
/**
 * Reads bytes 101-120 (1-based) from the HDFS file.
 * Byte 101 lives at zero-based offset 100 - the original code started at
 * offset 101 (off by one) and returned a 1024-byte zero-padded buffer,
 * which wrote ~1000 junk bytes to the destination.
 *
 * @param args args[0] is the HDFS source URI
 * @return the bytes actually read, at most 20; empty if the file has fewer than 101 bytes
 * @throws Exception if the file cannot be opened or read
 */
public static byte[] readFromHdfs(String[] args) throws Exception{
//hdfs uri
String uri = args[0];
//get hdfs file system instance
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(uri), conf);
FSDataInputStream in = null;
try{
//open hdfs input stream
in = fs.open(new Path(uri));
//positioned read of up to 20 bytes starting at zero-based offset 100
byte[] buffer = new byte[20];
int bytesRead = in.read(100L, buffer, 0, buffer.length);
if (bytesRead < 0) {
//file shorter than 101 bytes: nothing in the requested range
bytesRead = 0;
}
//trim to the byte count actually read so the caller writes no padding
byte[] result = new byte[bytesRead];
System.arraycopy(buffer, 0, result, 0, bytesRead);
return result;
}finally{
IOUtils.closeStream(in);
}
}
}