// Troubleshooting note for:
//   AbstractMethodError: org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.getProxy
// Fix 1: check whether the client and cluster Hadoop versions match.
// Fix 2: check for jar conflicts — use the Maven Helper plugin to search the
//        hadoop-hdfs / hadoop-common dependencies for conflicts.
// In practice the error is almost always one of these two causes.
package com.umetrap.domp.dao;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
@Slf4j
public class HDFSDao {

    /**
     * Logical HA nameservice id. Every {@code dfs.*} key below must use this exact
     * suffix, or the failover proxy provider cannot resolve the NameNode addresses.
     */
    private static final String NAMESERVICE = "xxcluster";

    /**
     * Connects to an HA-configured HDFS cluster and prints the space consumed,
     * length and file count of {@code /user}.
     *
     * <p>The HA values (nameservice, namenode ids, rpc addresses) come from the
     * cluster's hdfs-site.xml — obtain them from the HDFS administrators.
     *
     * @param args unused
     * @throws URISyntaxException    if the cluster URI is malformed
     * @throws IOException           on HDFS access failure
     * @throws InterruptedException  if FileSystem creation is interrupted
     */
    public static void main(String[] args)
            throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://" + NAMESERVICE);
        conf.set("dfs.nameservices", NAMESERVICE);
        conf.set("dfs.ha.namenodes." + NAMESERVICE, "namenode197,namenode200");
        // BUG FIX: these two keys previously used the suffix "umecdhcluster" while
        // every other key used "xxcluster"; the nameservice suffix must match.
        conf.set("dfs.namenode.rpc-address." + NAMESERVICE + ".namenode197", "IP:port namenode节点");
        conf.set("dfs.namenode.rpc-address." + NAMESERVICE + ".namenode200", "IP:port namenode节点");
        // Use the imported class directly instead of a brittle string literal.
        conf.set("dfs.client.failover.proxy.provider." + NAMESERVICE,
                ConfiguredFailoverProxyProvider.class.getName());

        // try-with-resources closes the FileSystem (previously leaked). It also
        // fixes an NPE: the old code caught creation failures, logged them, and
        // then fell through to fs.getContentSummary(...) with fs == null.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://" + NAMESERVICE), conf, "hdfs")) {
            ContentSummary contentSummary = fs.getContentSummary(new Path("/user"));
            long spaceConsumed = contentSummary.getSpaceConsumed();
            long length = contentSummary.getLength();
            long fileCount = contentSummary.getFileCount();
            System.out.println(spaceConsumed + "*********" + length + "******" + fileCount);
        } catch (Exception e) {
            log.error("createFileSystem", e);
        }
    }
}