hadoop 2.2.0 通过 Java API 操作 HDFS 报错,求大神指导

hadoop 2.2.0 通过java api 操作hdfs 报错,求大神指导

安装好hadoop 2.2.0后,通过java api 操作hdfs 报错了,详细信息如下:

2014-09-04 16:04:28,967 WARN  [main] util.NativeCodeLoader (NativeCodeLoader.java:(62)) - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable

java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.; Host Details : local host is: "USER-20140422SO/192.168.6.33"; destination host is: "h201":9001;

at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:764)

at org.apache.hadoop.ipc.Client.call(Client.java:1351)

at org.apache.hadoop.ipc.Client.call(Client.java:1300)

at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:206)

at $Proxy9.create(Unknown Source)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)

at java.lang.reflect.Method.invoke(Unknown Source)

at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:186)

at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)

at $Proxy9.create(Unknown Source)

at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:227)

at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1389)

at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1382)

at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1307)

at org.apache.hadoop.hdfs.DistributedFileSystem$6.doCall(DistributedFileSystem.java:384)

at org.apache.hadoop.hdfs.DistributedFileSystem$6.doCall(DistributedFileSystem.java:380)

at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)

at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:380)

at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:324)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:905)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:886)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:783)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:772)

at TestHadoop.getFileList(TestHadoop.java:94)

at TestHadoop.main(TestHadoop.java:132)

Caused by: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.

at com.google.protobuf.InvalidProtocolBufferException.invalidEndTag(InvalidProtocolBufferException.java:94)

at com.google.protobuf.CodedInputStream.checkLastTagWas(CodedInputStream.java:124)

at com.google.protobuf.AbstractParser.parsePartialFrom(AbstractParser.java:202)

at com.google.protobuf.AbstractParser.parsePartialDelimitedFrom(AbstractParser.java:241)

at com.google.protobuf.AbstractParser.parseDelimitedFrom(AbstractParser.java:253)

at com.google.protobuf.AbstractParser.parseDelimitedFrom(AbstractParser.java:259)

at com.google.protobuf.AbstractParser.parseDelimitedFrom(AbstractParser.java:49)

at org.apache.hadoop.ipc.protobuf.RpcHeaderProtos$RpcResponseHeaderProto.parseDelimitedFrom(RpcHeaderProtos.java:2364)

at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:996)

at org.apache.hadoop.ipc.Client$Connection.run(Client.java:891)

java类

import java.io.ByteArrayInputStream;

import java.io.File;

import java.io.IOException;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

import org.apache.hadoop.fs.FSDataInputStream;

import org.apache.hadoop.fs.FSDataOutputStream;

import org.apache.hadoop.fs.FileStatus;

import org.apache.hadoop.fs.FileSystem;

import org.apache.hadoop.fs.Path;

import org.apache.hadoop.io.IOUtils;

public class TestHadoop {

public static void main(String[] args) {

{

String uri = "hdfs://h201:9001"; //h201 为namenode 的host地址

Configuration conf = new Configuration();

File workaround = new File(".");

System.getProperties().put("hadoop.home.dir",

workaround.getAbsolutePath());

new File("./bin").mkdirs();

try {

new File("./bin/winutils.exe").createNewFile();

} catch (IOException e) {

e.printStackTrace();

}

FileSystem fs = null;

Path path = new Path("/user/root/");

try {

fs = FileSystem.get(URI.create(uri), conf);

FileStatus[] FileStatus=fs.listStatus(path);// fs.listStatus 就报错了,fs的相关操作都一样

} catch (Exception e) {

e.printStackTrace();

}

}

}

项目导入的jar包:

hadoop-2.2.0/share/hadoop/common/*.jar

hadoop-2.2.0/share/hadoop/common/lib/*.jar

hadoop-2.2.0/share/hadoop/hdfs/*.jar

hadoop-2.2.0/share/hadoop/mapreduce/*.jar

hadoop-2.2.0/share/hadoop/yarn/*.jar

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值