HDFS Java API test demo

Runtime environment: Hadoop 0.20.2 on CentOS 5.4, Java 1.6.0_20-ea, single-node Hadoop setup.

1. Create a MapReduce project and set the Hadoop home directory.
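Once the project is set up, a quick way to confirm the Hadoop jars are really on the build path is to print the library version. A minimal sketch (the class name here is hypothetical, but org.apache.hadoop.util.VersionInfo ships with Hadoop):

import org.apache.hadoop.util.VersionInfo;

public class ClasspathCheck {
    public static void main(String[] args) {
        // Prints "0.20.2" here if the Hadoop jars are visible on the classpath.
        System.out.println("Hadoop version: " + VersionInfo.getVersion());
    }
}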

2. Create the HDFSUtil class.

package Bruce.Hadoop.HDFSManger;

import java.util.Iterator;

import java.util.Map.Entry;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FSDataInputStream;

import org.apache.hadoop.fs.FSDataOutputStream;

import org.apache.hadoop.fs.FileStatus;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.hdfs.DistributedFileSystem;

import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class HDFSUtil {

public synchronized static FileSystem getFileSystem(String ip, int port) {

FileSystem fs = null;

String url = "hdfs://" + ip + ":" + String.valueOf(port);

Configuration config = new Configuration();

config.set("fs.default.name", url);
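// NOTE: "fs.default.name" is the configuration key used by Hadoop 0.20.x;
// later releases renamed it to "fs.defaultFS" (the old key still works
// there, but is deprecated).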

try {

fs = FileSystem.get(config);

} catch (Exception e) {

e.printStackTrace();

}

return fs;

}

public synchronized static void listNode(FileSystem fs) {
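// Note: the cast below assumes fs really is HDFS; other FileSystem
// implementations (e.g. LocalFileSystem) will throw a ClassCastException.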

DistributedFileSystem dfs = (DistributedFileSystem) fs;

try {

DatanodeInfo[] infos = dfs.getDataNodeStats();

for (DatanodeInfo node : infos) {

System.out.println("HostName: " + node.getHostName() + "/n"

+ node.getDatanodeReport());

System.out.println("--------------------------------");

}

} catch (Exception e) {

e.printStackTrace();

}

}

/**

* Print the current configuration.

*

* @param fs

*/

public synchronized static void listConfig(FileSystem fs) {

Iterator<Entry<String, String>> entrys = fs.getConf().iterator();

while (entrys.hasNext()) {

Entry<String, String> item = entrys.next();

System.out.println(item.getKey() + ": " + item.getValue());

}

}

/**

* Create a directory, including any missing parent directories.

*

* @param fs

* @param dirName

*/

public synchronized static void mkdirs(FileSystem fs, String dirName) {

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

String dir = workDir + "/" + dirName;

Path src = new Path(dir);

// FsPermission p = FsPermission.getDefault();

boolean succ;

try {

succ = fs.mkdirs(src);

if (succ) {

System.out.println("create directory " + dir + " successed. ");

} else {

System.out.println("create directory " + dir + " failed. ");

}

} catch (Exception e) {

e.printStackTrace();

}

}

/**

* Delete a directory and everything beneath it.

*

* @param fs

* @param dirName

*/

public synchronized static void rmdirs(FileSystem fs, String dirName) {

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

String dir = workDir + "/" + dirName;

Path src = new Path(dir);

boolean succ;

try {

succ = fs.delete(src, true);

if (succ) {

System.out.println("remove directory " + dir + " successed. ");

} else {

System.out.println("remove directory " + dir + " failed. ");

}

} catch (Exception e) {

e.printStackTrace();

}

}

/**

* Upload a local file or directory to HDFS.

*

* @param fs

* @param local

* @param remote

*/

public synchronized static void upload(FileSystem fs, String local,

String remote) {

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

Path dst = new Path(workDir + "/" + remote);

Path src = new Path(local);

try {
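// copyFromLocalFile(delSrc, overwrite, src, dst): delSrc = false keeps the
// local copy; overwrite = true replaces any existing remote file.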

fs.copyFromLocalFile(false, true, src, dst);

System.out.println("upload " + local + " to " + remote + " successed. ");

} catch (Exception e) {

e.printStackTrace();

}

}

/**

* Download a file or directory from HDFS.

*

* @param fs

* @param local

* @param remote

*/

public synchronized static void download(FileSystem fs, String local,

String remote) {

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

Path remotePath = new Path(workDir + "/" + remote);

Path localPath = new Path(local);

try {

fs.copyToLocalFile(false, remotePath, localPath); // (delSrc, src on HDFS, dst on local disk)

System.out.println("download from " + remote + " to " + local

+ " successed. ");

} catch (Exception e) {

e.printStackTrace();

}

}

/**

* Convert a byte count into a human-readable string.

*

* @param size

* @return

*/

public synchronized static String convertSize(long size) {

// Integer division truncates; this is a rough display value.

String result;

if (size < 1024) {

result = size + " B";

} else if (size < 1024 * 1024) {

result = (size / 1024) + " KB";

} else if (size < 1024 * 1024 * 1024) {

result = (size / 1024 / 1024) + " MB";

} else {

result = (size / 1024 / 1024 / 1024) + " GB";

}

return result;

}

/**

* Recursively list the files and directories under an HDFS path.

*

* @param fs

* @param path

*/

public synchronized static void listFile(FileSystem fs, String path) {

Path workDir = fs.getWorkingDirectory();

Path dst;

if (null == path || "".equals(path)) {

dst = workDir; // no path given: start from the working directory

} else {

dst = new Path(path);

}

try {

String relativePath = "";

FileStatus[] fList = fs.listStatus(dst);

for (FileStatus f : fList) {

if (null != f) {

relativePath = new StringBuffer()

.append(f.getPath().getParent()).append("/")

.append(f.getPath().getName()).toString();

if (f.isDir()) {

listFile(fs, relativePath);

} else {

System.out.println(convertSize(f.getLen()) + "\t\t" + relativePath);

}

}

}

} catch (Exception e) {

e.printStackTrace();

}

}

public synchronized static void write(FileSystem fs, String path,

String data) {

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

Path dst = new Path(workDir + "/" + path);

try {

FSDataOutputStream dos = fs.create(dst);
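// writeUTF() prefixes the payload with a two-byte length and uses modified
// UTF-8, so the file is not plain text; read it back with readUTF().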

dos.writeUTF(data);

dos.close();

System.out.println("write content to " + path + " successed. ");

} catch (Exception e) {

e.printStackTrace();

}

}

public synchronized static void append(FileSystem fs, String path,

String data) {

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

Path dst = new Path(workDir + "/" + path);

try {

FSDataOutputStream dos = fs.append(dst);

dos.writeUTF(data);

dos.close();

System.out.println("append content to " + path + " successed. ");

} catch (Exception e) {

e.printStackTrace();

}

}

public synchronized static String read(FileSystem fs, String path) {

String content = null;

// Path home = fs.getHomeDirectory();

Path workDir = fs.getWorkingDirectory();

Path dst = new Path(workDir + "/" + path);

try {

// reading

FSDataInputStream dis = fs.open(dst);

content = dis.readUTF();

dis.close();

System.out.println("read content from " + path + " successed. ");

} catch (Exception e) {

e.printStackTrace();

}

return content;

}

}

3. Create the test case.

package Bruce.Hadoop.HDFSManger;

// must be run as a Hadoop program (with the Hadoop libraries on the classpath)

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

public class HDFSTest {

/**

* @param args

*/

public static void main(String[] args) {

FileSystem fs = HDFSUtil.getFileSystem("192.168.100.3", 9000);

HDFSUtil.listNode(fs); // print information for each datanode

String Dir = "input";

String FileName = "Name.txt";

try {

if (!fs.exists(new Path(Dir))) {

HDFSUtil.mkdirs(fs, Dir);

System.out.println("mkdir " + Dir);

} else {

System.out.println(Dir + " exists!");

}

} catch (IOException e) {

e.printStackTrace();

}

HDFSUtil.write(fs, Dir+"/"+FileName, "bruce wang"); // overwrites any existing content

//HDFSUtil.append(fs, Dir+"/"+FileName, "\ntest-2"); // appending to HDFS files is best avoided; support is poor
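// (On Hadoop 0.20.x, append() additionally requires "dfs.support.append"
// to be enabled in hdfs-site.xml, and even then it is considered unstable,
// which is why the call above is commented out.)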

System.out.println("write " + Dir+"/"+FileName);

String sFileContend = HDFSUtil.read(fs, Dir+"/"+FileName);

System.out.println(sFileContend);

System.out.println("read " + Dir+"/"+FileName);

}

}

4. Run it as a Hadoop program; the output:

12/02/12 01:17:57 WARN conf.Configuration: DEPRECATED: hadoop-site.xml found in the classpath. Usage of hadoop-site.xml is deprecated. Instead use core-site.xml, mapred-site.xml and hdfs-site.xml to override properties of core-default.xml, mapred-default.xml and hdfs-default.xml respectively

HostName: BruceWangUbuntu
Name: 192.168.100.3:50010

Decommission Status : Normal

Configured Capacity: 20608348160 (19.19 GB)

DFS Used: 24609 (24.03 KB)

Non DFS Used: 4905893855 (4.57 GB)

DFS Remaining: 15702429696(14.62 GB)

DFS Used%: 0%

DFS Remaining%: 76.19%

Last contact: Sun Feb 12 01:17:48 CST 2012

--------------------------------

input exists!

write content to input/Name.txt succeeded.

write input/Name.txt

read content from input/Name.txt succeeded.

bruce wang

read input/Name.txt
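One detail the demo glosses over: the FileSystem handle is never closed. A minimal cleanup sketch (reusing the HDFSUtil class from step 2; the IP and port are the same assumptions as in the test case):

FileSystem fs = HDFSUtil.getFileSystem("192.168.100.3", 9000);
try {
    HDFSUtil.listNode(fs);
    // ... other HDFS operations ...
} finally {
    try {
        if (fs != null) {
            fs.close(); // releases the client's connection to the NameNode
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}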
