package com.da.hbase.tool.utils;
import com.da.hbase.tool.common.Const;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Utility class providing common helper methods for HDFS operations.
 */
public classHdfsUtils {public static final Logger LOG= LoggerFactory.getLogger(HdfsUtils.class);/**
* Connects to an HDFS cluster directly via the NameNode ip (or host name).
*
* @param ip NameNode address, used to build the {@code hdfs://<ip>} URI
* @return a connected {@link FileSystem}, or {@code null} if the connection
*         attempt fails with an {@link IOException}
*/
public static FileSystem getFsFromIp(String ip) {
    FileSystem fs = null;
    try {
        fs = FileSystem.get(URI.create("hdfs://" + ip), new Configuration());
    } catch (IOException e) {
        // Pass the exception as the last argument so SLF4J records the
        // stack trace instead of silently dropping it.
        LOG.error("此ip:{} 连接出现异常", ip, e);
    }
    return fs;
}

/**
 * Checks whether the given FileSystem is usable.
 *
 * @param fs the FileSystem to probe
 * @return whether the FileSystem is available
 */
public staticBoolean checkFs(FileSystem fs){
Boolean success=true;if(nu