import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;
public class Read {

    // Shared HDFS handle; populated by init() before each test method runs.
    FileSystem fs = null;

    /**
     * Connects to the HDFS NameNode at hdfs://hadoop1:9000 as user "root"
     * with replication=2 and block size=128m.
     *
     * @throws Exception if the URI is malformed or the connection fails
     */
    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "2");
        conf.set("dfs.blocksize", "128m");
        // BUG FIX: the original discarded the FileSystem returned here,
        // leaving `fs` null and causing a NullPointerException in testwrite().
        fs = FileSystem.get(new URI("hdfs://hadoop1:9000"), conf, "root");
    }

    /*
     * Write a local file into HDFS: copies F:/images/5468/02.gif
     * into /yy.jpg on the cluster (fails if /yy.jpg already exists,
     * since overwrite is false).
     */
    @Test
    public void testwrite() throws Exception {
        // try-with-resources guarantees both streams are closed even when an
        // exception is thrown; the original never closed the FileInputStream
        // and skipped out.close()/fs.close() on any failure.
        try (FSDataOutputStream out = fs.create(new Path("/yy.jpg"), false);
             FileInputStream in = new FileInputStream("F:/images/5468/02.gif")) {
            byte[] buf = new byte[1024];
            int read;
            while ((read = in.read(buf)) != -1) {
                out.write(buf, 0, read);
            }
        } finally {
            // Preserve the original behavior of closing the FileSystem
            // after the write completes.
            fs.close();
        }
    }
}
// 05: Writing data into HDFS using the API
// (article originally published 2022-12-14 09:11:52)