package com.zhanbk.hadoop;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class HDFSApp {

    /**
     * Builds a {@link FileSystem} from the Hadoop configuration found on the
     * classpath (core-site.xml, core-default.xml, hdfs-site.xml, hdfs-default.xml).
     *
     * @return the configured Hadoop file system
     * @throws IOException if the file system cannot be created
     */
    public static FileSystem getFileSystem() throws IOException {
        // Configuration() loads core-site.xml / hdfs-site.xml (and their
        // *-default.xml counterparts) automatically from the classpath.
        Configuration conf = new Configuration();
        return FileSystem.get(conf);
    }

    /**
     * Reads the HDFS file at {@code fileName} and copies its bytes to stdout.
     *
     * @param fileName HDFS path of the file to read
     * @throws IOException if the file cannot be opened or the copy fails
     *                     (previously swallowed via printStackTrace; now
     *                     propagated, matching the declared throws clause)
     */
    public static void readFile(String fileName) throws IOException {
        FileSystem fileSystem = getFileSystem();
        Path readPath = new Path(fileName);
        // try-with-resources guarantees the input stream is closed even on
        // failure, replacing the manual finally/closeStream cleanup.
        try (FSDataInputStream inStream = fileSystem.open(readPath)) {
            // close=false: System.out must stay open for later callers.
            IOUtils.copyBytes(inStream, System.out, 4096, false);
        }
    }

    /**
     * Copies a local file into HDFS at the given target path.
     *
     * @param srcFile local filesystem path of the source file
     * @param tarFile HDFS path to write to (created/overwritten via
     *                {@code FileSystem.create})
     * @throws IOException if the source cannot be read or the target cannot
     *                     be written (previously swallowed; now propagated)
     */
    public static void putFileToHdfs(String srcFile, String tarFile) throws IOException {
        FileSystem fileSystem = getFileSystem();
        Path writePath = new Path(tarFile);
        // Both streams are AutoCloseable; try-with-resources closes them in
        // reverse order of acquisition, even if the copy throws.
        try (FileInputStream inStream = new FileInputStream(new File(srcFile));
             FSDataOutputStream outputStream = fileSystem.create(writePath)) {
            // close=false: closing is handled by try-with-resources above.
            IOUtils.copyBytes(inStream, outputStream, 4096, false);
        }
    }

    /**
     * Demo entry point: uploads a local TSV file into HDFS.
     *
     * @param args unused
     * @throws IOException if the upload fails
     */
    public static void main(String[] args) throws IOException {
        // Example read, kept for reference:
        // readFile("/input_hbase/fruit.tsv");
        putFileToHdfs("/data/datas/fruit.tsv", "/input_hbase/fruit2.tsv");
    }
}