Operating HDFS from Java (obtaining the FileSystem, creating directories, and more)

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.0</version>
        <exclusions>
            <exclusion>
                <artifactId>commons-configuration</artifactId>
                <groupId>commons-configuration</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-compress</artifactId>
                <groupId>org.apache.commons</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-logging</artifactId>
                <groupId>commons-logging</groupId>
            </exclusion>
            <exclusion>
                <artifactId>jackson-core-asl</artifactId>
                <groupId>org.codehaus.jackson</groupId>
            </exclusion>
            <exclusion>
                <artifactId>jackson-mapper-asl</artifactId>
                <groupId>org.codehaus.jackson</groupId>
            </exclusion>
            <exclusion>
                <artifactId>guava</artifactId>
                <groupId>com.google.guava</groupId>
            </exclusion>
            <exclusion>
                <artifactId>servlet-api</artifactId>
                <groupId>javax.servlet</groupId>
            </exclusion>
            <exclusion>
                <artifactId>log4j</artifactId>
                <groupId>log4j</groupId>
            </exclusion>
            <exclusion>
                <artifactId>netty</artifactId>
                <groupId>io.netty</groupId>
            </exclusion>
            <exclusion>
                <artifactId>slf4j-log4j12</artifactId>
                <groupId>org.slf4j</groupId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.6.0</version>
        <exclusions>
            <exclusion>
                <artifactId>commons-logging</artifactId>
                <groupId>commons-logging</groupId>
            </exclusion>
            <exclusion>
                <artifactId>guava</artifactId>
                <groupId>com.google.guava</groupId>
            </exclusion>
        </exclusions>
    </dependency>


    <!--20190313wjs add-->
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>druid</artifactId>
        <version>1.0.17</version>
    </dependency>

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-service</artifactId>
        <version>1.2.1</version>
        <exclusions>
            <exclusion>
                <artifactId>log4j</artifactId>
                <groupId>log4j</groupId>
            </exclusion>
            <exclusion>
                <artifactId>guava</artifactId>
                <groupId>com.google.guava</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-compress</artifactId>
                <groupId>org.apache.commons</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-logging</artifactId>
                <groupId>commons-logging</groupId>
            </exclusion>
            <exclusion>
                <artifactId>activation</artifactId>
                <groupId>javax.activation</groupId>
            </exclusion>
            <exclusion>
                <artifactId>mail</artifactId>
                <groupId>javax.mail</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-lang</artifactId>
                <groupId>commons-lang</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-collections</artifactId>
                <groupId>commons-collections</groupId>
            </exclusion>
            <exclusion>
                <artifactId>antlr-runtime</artifactId>
                <groupId>org.antlr</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-compiler</artifactId>
                <groupId>org.codehaus.janino</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-httpclient</artifactId>
                <groupId>commons-httpclient</groupId>
            </exclusion>
            <exclusion>
                <artifactId>slf4j-log4j12</artifactId>
                <groupId>org.slf4j</groupId>
            </exclusion>

            <exclusion>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-exec</artifactId>
            </exclusion>

        </exclusions>


    </dependency>

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-metastore</artifactId>
        <version>1.2.1</version>
        <exclusions>
            <exclusion>
                <artifactId>guava</artifactId>
                <groupId>com.google.guava</groupId>
            </exclusion>
            <exclusion>
                <artifactId>avro</artifactId>
                <groupId>org.apache.avro</groupId>
            </exclusion>
            <exclusion>
                <artifactId>jsr305</artifactId>
                <groupId>com.google.code.findbugs</groupId>
            </exclusion>
            <exclusion>
                <artifactId>slf4j-log4j12</artifactId>
                <groupId>org.slf4j</groupId>
            </exclusion>
        </exclusions>
    </dependency>

    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>1.2.1</version>
        <exclusions>
            <exclusion>
                <artifactId>guava</artifactId>
                <groupId>com.google.guava</groupId>
            </exclusion>
            <exclusion>
                <artifactId>log4j</artifactId>
                <groupId>log4j</groupId>
            </exclusion>
            <exclusion>
                <artifactId>commons-compress</artifactId>
                <groupId>org.apache.commons</groupId>
            </exclusion>
            <exclusion>
                <artifactId>json</artifactId>
                <groupId>org.json</groupId>
            </exclusion>
        </exclusions>
    </dependency>
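
With the Hive dependencies above (hive-jdbc 1.2.1, hive-service, plus druid for connection pooling), queries can also be issued through plain JDBC. The sketch below is illustrative only: the HiveServer2 address, user and query are assumptions, so adjust them to your cluster.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcDemo {
        public static void main(String[] args) throws Exception {
            // Register the HiveServer2 JDBC driver shipped with hive-jdbc 1.2.1
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // Example URL and user only; replace with your HiveServer2 endpoint
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:hive2://192.168.0.152:10000/default", "hdfs", "");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("SHOW TABLES")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }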

//import com.cosmosource.controller.HDFS.HDFSController;

import com.alibaba.fastjson.JSON;
import com.cosmosource.core.common.Const;
import com.cosmosource.core.common.GlobalDefine;
import com.cosmosource.model.FileInfo;
import com.cosmosource.model.ResultModel;
import net.sf.ehcache.Cache;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public class HDFSUtil {
private static Logger logger = LoggerFactory.getLogger(HDFSUtil.class);

//    private static String HDFSUri = "hdfs://10.8.177.29:8020";

// private static String HDFSUri = "hdfs://192.168.0.152:8020";
private static String HDFSUri = GlobalDefine.hdfs_url;

/**
 * Obtain the file system.
 *
 * @return FileSystem the HDFS file system handle
 */
public static FileSystem getFileSystem() {
    // Read the configuration
    Configuration conf = new Configuration();
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // File system handle
    FileSystem fs = null;
    String hdfsUri = HDFSUri;
    if (StringUtils.isBlank(hdfsUri)) {
            // Return the default file system; when running on the Hadoop cluster this call obtains it directly
        try {
            fs = FileSystem.get(conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
    } else {
            // Return the specified file system; for local testing the file system must be obtained this way
        try {
            URI uri = new URI(hdfsUri.trim());
            fs = FileSystem.get(uri, conf, "hdfs");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return fs;
}
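
    /*
     * Usage sketch (illustrative, not part of the original utility): list an HDFS
     * directory with the FileSystem returned by getFileSystem(). The "/tmp" path
     * is only an example.
     *
     *     FileSystem fs = HDFSUtil.getFileSystem();
     *     for (FileStatus status : fs.listStatus(new Path("/tmp"))) {
     *         System.out.println(status.getPath() + "  " + status.getLen());
     *     }
     *     fs.close();
     */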

/**
 * Create a directory.
 *
 * @param path directory path
 */
public static Boolean mkdir(String path) {
    try {
        FileSystem fs = getFileSystem();
        System.out.println("FilePath=" + path);
        if (!fs.exists(new Path(path))) {
            // Create the directory
            fs.mkdirs(new Path(path));
            logger.info("[mkdir] 创建文件目录成功,path=[{}]", path);
        }
        fs.close(); // release resources
        return true;
    } catch (Exception e) {
        logger.error("[mkdir] 创建文件目录失败,path=[{}]", path, e);
        return false;
    }
}

/**
 * Check whether a directory exists.
 *
 * @param filePath directory path
 * @param create   whether to create it if it does not exist
 */
public static boolean existDir(String filePath, boolean create) {
    boolean flag = false;
    if (StringUtils.isEmpty(filePath)) {
        logger.error("[existDir] 文件目录为空,filePath=[{}]", filePath);
        return flag;
    }
    try {
        Path path = new Path(filePath);
        FileSystem fs = getFileSystem(); // FileSystem handle
        if (create) {
            if (!fs.exists(path)) {
                fs.mkdirs(path);
            }
        }
        if (fs.isDirectory(path)) {

            flag = true;
        }
    } catch (Exception e) {
        logger.error("[existDir] 判断目录是否存在出现异常,filePath=[{}]", filePath, e);
    }
    return flag;
}
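
    /*
     * Usage sketch (illustrative): create the directory on demand and verify it
     * before writing output. The paths below are only examples.
     *
     *     boolean ready = HDFSUtil.existDir("/user/demo/output", true);
     *     if (ready) {
     *         HDFSUtil.copyFileToHDFS("/tmp/result.csv", "/user/demo/output/result.csv");
     *     }
     */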

/**
 * Upload a local file to HDFS.
 *
 * @param srcFile  source file path
 * @param destPath HDFS path
 */
public static Boolean copyFileToHDFS(String srcFile, String destPath) {
    try {
        FileInputStream fis = new FileInputStream(new File(srcFile)); // read the local file
        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        FileSystem fs = FileSystem.get(URI.create(HDFSUri + destPath), config, "hdfs");
        OutputStream os = fs.create(new Path(destPath));
        //copy
        IOUtils.copyBytes(fis, os, 4096, true);
        fs.close();
        logger.info("[copyFileToHDFS] 本地文件上传至HDFS成功,srcFile:{},destPath:{}", srcFile, destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyFileToHDFS] 本地文件上传至HDFS失败,srcFile:{},destPath:{}", srcFile, destPath, e);
        return false;
    }
}
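
    /*
     * Reverse direction (illustrative sketch, not part of the original class):
     * download an HDFS file to the local disk with the same IOUtils helper.
     * Both paths are examples.
     *
     *     FileSystem fs = HDFSUtil.getFileSystem();
     *     InputStream in = fs.open(new Path("/user/demo/output/result.csv"));
     *     OutputStream out = new FileOutputStream("/tmp/result.csv");
     *     IOUtils.copyBytes(in, out, 4096, true); // true closes both streams
     *     fs.close();
     */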

/**
 * Upload a local file to HDFS.
 *
 * @param srcFile  source file
 * @param destPath HDFS path
 */
public static Boolean copyFileToHDFSOld2(File srcFile, String destPath) {
    try {

        // FileInputStream fis = new FileInputStream(new File(srcFile)); // read the local file
        FileInputStream fis = new FileInputStream(srcFile); // read the local file
        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        FileSystem fs = FileSystem.get(URI.create(HDFSUri + destPath), config, "hdfs");
        // Path path = new Path(destPath);

        OutputStream os = fs.create(new Path(destPath));
        //copy
        IOUtils.copyBytes(fis, os, 4096, true);
        fs.close();
        logger.info("[copyFileToHDFS] 本地文件上传至HDFS成功,srcFile:{},destPath:{}", srcFile, destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyFileToHDFS] 本地文件上传至HDFS失败,srcFile:{},destPath:{}", srcFile, destPath, e);
        return false;
    }
}

/**
 * Upload edited content to HDFS.
 *
 * @param srcFile      source file path
 * @param destPathFlag language flag ("sql", "python" or "scala") selecting the target log file
 * @param editString   content to write
 */
public static Boolean copyEditStringToHDFSOld(String srcFile, String destPathFlag, String editString) {
    try {
        FileInputStream fis = new FileInputStream(new File(srcFile));//读取本地文件
        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "sql.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            }
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "python.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            }
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "scala.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            }
        }

        outputStream.write(editString.getBytes());
        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,srcFile:{},destPathFlag:{}", srcFile, destPathFlag);
        return true;
    } catch (Exception e) {
        logger.error("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS失败,srcFile:{},destPathFlag:{}", srcFile, destPathFlag, e);
        return false;
    }
}

/**
 * 编辑内容上传至 HDFS
 *
 * @param destPathFlag hdfs路径
 */
public static Boolean copyEditStringToHDFSOld3(/*String srcFile, */String destPathFlag, String editString) {
    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;

        // Shared cache for the run history
        Cache cache = EhcacheUtil.getCache(Const.GDF);
        String setSqlkey = "wjsSqlRunHistory";
        String setPythonkey = "wjsPythonRunHistory";
        String setScalakey = "wjsScalaRunHistory";

        if (destPathFlag.equals("sql")) {
            StringBuilder sql = new StringBuilder();
            String sqlEditString = sql.append(editString).append("\n").toString();

            // Flow: cache the edited content first, delete sql.log on HDFS, then write back the latest data
            GlobalDefine.editSqlList.add(sqlEditString);
            EhcacheUtil.put(cache, EhcacheUtil.newElement(setSqlkey, GlobalDefine.editSqlList));

            GlobalDefine.editSqlList = (List<String>) EhcacheUtil.getObjectValue(cache, setSqlkey);
            if (GlobalDefine.editSqlList.size() > 50) {
                GlobalDefine.editSqlList.remove(0);
            }

            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "sql.log";
            String rmfilePath = destPathBase + "sql.log";
            // Obtain the file system
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (fs.exists(new Path(destPath))) {
                rmfiledir(rmfilePath);
            }
            outputStream = fs.create(new Path(destPath));
            logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);

            StringBuilder sql2 = new StringBuilder();
            for (String editSql : GlobalDefine.editSqlList) {
                sql2.append(editSql);
            }
            outputStream.write(sql2.toString().getBytes());

        } else if (destPathFlag.equals("python")) {
            StringBuilder python = new StringBuilder();
            String pythonEditString = python.append(editString).append("\n").toString();

            GlobalDefine.editPythonList.add(pythonEditString);
            EhcacheUtil.put(cache, EhcacheUtil.newElement(setPythonkey, GlobalDefine.editPythonList));

            GlobalDefine.editPythonList = (List<String>) EhcacheUtil.getObjectValue(cache, setPythonkey);
            if (GlobalDefine.editPythonList.size() > 50) {
                GlobalDefine.editPythonList.remove(0);
            }

            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "python.log";
            String rmfilePath = destPathBase + "python.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (fs.exists(new Path(destPath))) {
                rmfiledir(rmfilePath);
            }
            outputStream = fs.create(new Path(destPath));
            logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);

            StringBuilder python2 = new StringBuilder();
            for (String editPython : GlobalDefine.editPythonList) {
                python2.append(editPython);
            }
            outputStream.write(python2.toString().getBytes());

        } else if (destPathFlag.equals("scala")) {
            StringBuilder scala = new StringBuilder();
            String scalaEditString = scala.append(editString).append("\n").toString();

            GlobalDefine.editScalaList.add(scalaEditString);
            EhcacheUtil.put(cache, EhcacheUtil.newElement(setScalakey, GlobalDefine.editScalaList));

            GlobalDefine.editScalaList = (List<String>) EhcacheUtil.getObjectValue(cache, setScalakey);
            if (GlobalDefine.editScalaList.size() > 50) {
                GlobalDefine.editScalaList.remove(0);
            }

            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "scala.log";
            String rmfilePath = destPathBase + "scala.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (fs.exists(new Path(destPath))) {
                rmfiledir(rmfilePath);
            }
            outputStream = fs.create(new Path(destPath));
            logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);

            StringBuilder scala2 = new StringBuilder();
            for (String editScala : GlobalDefine.editScalaList) {
                scala2.append(editScala);
            }
            outputStream.write(scala2.toString().getBytes());
        }

        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPathFlag:{}", destPathFlag);
        return true;
    } catch (Exception e) {
        logger.error("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS失败,destPathFlag:{}", destPathFlag, e);
        return false;
    }
}
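
    /*
     * Usage sketch (illustrative): the flag chooses which run-history log under
     * GlobalDefine.hfds_code_log is rewritten; the values accepted by the code
     * above are "sql", "python" and "scala".
     *
     *     HDFSUtil.copyEditStringToHDFSOld3("sql", "SELECT COUNT(*) FROM demo_table;");
     */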

/**
 * 编辑内容上传至 HDFS
 *
 * @param
 */
public static Boolean copyEditStringToHDFSOld3_schedule(/*String srcFile, *//*String destPathFlag, String editString*/) {
    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;

        // Shared cache for the run history
        Cache cache = EhcacheUtil.getCache(Const.GDF);
        String setSqlkey = "wjsSqlRunHistory";
        String setPythonkey = "wjsPythonRunHistory";
        String setScalakey = "wjsScalaRunHistory";

        String destPathFlag_sql = "sql";
        String destPathFlag_python = "python";
        String destPathFlag_scala = "scala";

        if (destPathFlag_sql.equals("sql")) {
            GlobalDefine.editSqlList = (List<String>) EhcacheUtil.getObjectValue(cache, setSqlkey);
            if (GlobalDefine.editSqlList.size() != 0) {
                String jsonSqlString = JSON.toJSONString(GlobalDefine.editSqlList);

                // String destPathBase = "/user/wjs/oozie-apps/";
                String destPathBase = GlobalDefine.hfds_code_log;
                destPath = HDFSUri + destPathBase + "sqlSchedule.log";
                String rmfilePath = destPathBase + "sqlSchedule.log";

                fs = FileSystem.get(URI.create(destPath), config, "hdfs");
                if (fs.exists(new Path(destPath))) {
                    rmfiledir(rmfilePath);
                }
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);

                StringBuilder sql2 = new StringBuilder();
                sql2.append(jsonSqlString);
                outputStream.write(sql2.toString().getBytes());

                outputStream.flush();
                outputStream.close();
                fs.close();
            }
        }
        if (destPathFlag_python.equals("python")) {
            GlobalDefine.editPythonList = (List<String>) EhcacheUtil.getObjectValue(cache, setPythonkey);
            if (GlobalDefine.editPythonList.size() != 0) {
                String jsonPythonString = JSON.toJSONString(GlobalDefine.editPythonList);

                String destPathBase = GlobalDefine.hfds_code_log;
                destPath = HDFSUri + destPathBase + "pythonSchedule.log";
                String rmfilePath = destPathBase + "pythonSchedule.log";
                fs = FileSystem.get(URI.create(destPath), config, "hdfs");
                if (fs.exists(new Path(destPath))) {
                    rmfiledir(rmfilePath);
                }
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);

                StringBuilder python2 = new StringBuilder();
                python2.append(jsonPythonString);
                outputStream.write(python2.toString().getBytes());

                outputStream.flush();
                outputStream.close();
                fs.close();
            }
        }
        if (destPathFlag_scala.equals("scala")) {
            GlobalDefine.editScalaList = (List<String>) EhcacheUtil.getObjectValue(cache, setScalakey);
            if (GlobalDefine.editScalaList.size() != 0) {
                String jsonScalaString = JSON.toJSONString(GlobalDefine.editScalaList);

                String destPathBase = GlobalDefine.hfds_code_log;
                destPath = HDFSUri + destPathBase + "scalaSchedule.log";
                String rmfilePath = destPathBase + "scalaSchedule.log";
                fs = FileSystem.get(URI.create(destPath), config, "hdfs");
                if (fs.exists(new Path(destPath))) {
                    rmfiledir(rmfilePath);
                }
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);

                StringBuilder scala2 = new StringBuilder();
                scala2.append(jsonScalaString);
                outputStream.write(scala2.toString().getBytes());

                outputStream.flush();
                outputStream.close();
                fs.close();
            }
        }

        logger.info("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS成功");
        return true;
    } catch (Exception e) {
        logger.error("[copyEditStringToHDFSOld] 编辑内容上传至 HDFS失败", e);
        return false;
    }
}


/**
 * 版本历史信息上传至 HDFS
 *
 * @param destPath hdfs路径
 */
public static Boolean copyVersionInfoToHDFS(/*String srcFile, */String destPath, String filePathName, String versionInfo) {
    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;

        // Shared cache for the version history
        Cache cache = EhcacheUtil.getCache(Const.GDF);
        String setVersionInfo = "wjsVersionInfoHistory";

        StringBuilder versionInfoBuilder = new StringBuilder();
        String versionInfoEditString = versionInfoBuilder.append(versionInfo).append("\n").toString();

        GlobalDefine.versionInfoList.add(versionInfoEditString);
        EhcacheUtil.put(cache, EhcacheUtil.newElement(setVersionInfo, GlobalDefine.versionInfoList));

        GlobalDefine.versionInfoList = (List<String>) EhcacheUtil.getObjectValue(cache, setVersionInfo);

        destPath = HDFSUri + destPath + "/" + filePathName + ".version";

        fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        if (fs.exists(new Path(destPath))) {
            rmfiledir(destPath);
        }
        outputStream = fs.create(new Path(destPath));
        logger.info("[copyVersionInfoToHDFS] 版本历史信息上传至HDFS成功,destPath=[{}]", destPath);

        StringBuilder sb = new StringBuilder();
        for (String versionLine : GlobalDefine.versionInfoList) {
            sb.append(versionLine);
        }
        outputStream.write(sb.toString().getBytes());

        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyVersionInfoToHDFS] 版本历史信息上传至HDFS成功,destPath:{}", destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyVersionInfoToHDFS] 版本历史信息上传至HDFS失败,destPath:{}", destPath, e);
        return false;
    }
}

/**
 * OozieString上传至HDFS
 *
 * @param oozieString 源文件 路径
 * @param destPath    hdfs路径
 */
public static Boolean copyOozieStringToHDFS(String oozieString, String destPath) {
    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");

        FSDataOutputStream outputStream = fs.create(new Path(destPath + "/oozie.xml"), true);

        outputStream.write(oozieString.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyOozieStringToHDFS] OozieString上传至HDFS成功,destPath=[{}]", destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyOozieStringToHDFS] OozieString上传至HDFS失败,destPath=[{}]", destPath, e);
        return false;
    }
}
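
    /*
     * Usage sketch (illustrative): write a generated Oozie workflow definition as
     * oozie.xml under an application directory; the path and XML are only examples.
     *
     *     String workflowXml = "<workflow-app xmlns=\"uri:oozie:workflow:0.5\" name=\"demo\">...</workflow-app>";
     *     HDFSUtil.copyOozieStringToHDFS(workflowXml, "/user/wjs/oozie-apps/demo");
     */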

/**
 * OozieString上传至HDFS
 *
 * @param oozieString 源文件 路径
 * @param destPath    hdfs路径
 */
public static Boolean copyOozieStringToHDFSOld(String oozieString, String destPath) {
    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");

        FSDataOutputStream outputStream = fs.create(new Path(destPath + "/oozie.xml"), true);

        outputStream.write(oozieString.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyOozieStringToHDFSOld] OozieString上传至HDFS成功,destPath=[{}]", destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyOozieStringToHDFSOld] OozieString上传至HDFS失败,destPath=[{}]", destPath, e);
        return false;
    }
}

/**
 * 编辑内容上传至 HDFS
 *
 * @param srcFile      源文件 路径
 * @param destPathFlag hdfs路径
 */
public static Boolean copyEditStringToHDFS(String srcFile, String destPathFlag, List<String> editStringList, String classPath) {
    try {
        FileInputStream fis = new FileInputStream(new File(srcFile));//读取本地文件
        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "sql.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            }
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "python.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            }
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "scala.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            }

        }
        Long originRowCount = getRowCountByFile(destPath, fs);
        Long addRowCount = (long) editStringList.size();
        if (originRowCount + addRowCount > 1000) {
            Long deleteCount = originRowCount + addRowCount - 1000;

            deleteRowCountByFile(destPath, fs, deleteCount, classPath, destPathFlag);

        }


        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < editStringList.size(); i++) {
            sb.append(editStringList.get(i)).append("\r\n");
        }
        outputStream.write(sb.toString().getBytes());

        outputStream.flush();

        outputStream.close();

        fs.close();

        logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,srcFile:{},destPathFlag:{}", srcFile, destPathFlag);
        return true;
    } catch (Exception e) {
        logger.error("[copyEditStringToHDFS] 编辑内容上传至 HDFS失败,srcFile:{},destPathFlag:{}", srcFile, destPathFlag, e);
        return false;
    }
}

/**
 * 编辑内容上传至 HDFS
 *
 * @param destPathFlag hdfs路径
 */
public static Boolean copyEditStringToHDFSOld2(/*String srcFile, */String destPathFlag, String editStringList, String classPath) {
    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "sql.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            } else {
                outputStream = fs.append(new Path(destPath));
            }
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "python.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            } else {
                outputStream = fs.append(new Path(destPath));
            }
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "scala.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
            if (!fs.exists(new Path(destPath))) {
                outputStream = fs.create(new Path(destPath));
                logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
            } else {
                outputStream = fs.append(new Path(destPath));
            }

        }
        Long originRowCount = getRowCountByFile(destPath, fs);

        // Long addRowCount = (long) editStringList.size();
        if (originRowCount + 1 > 50) {
            Long deleteCount = originRowCount + 1 - 50;

            deleteRowCountByFile(destPath, fs, deleteCount, classPath, destPathFlag);

        }


        StringBuilder sb = new StringBuilder();

        sb.append(editStringList).append("\r\n");
        outputStream.write(sb.toString().getBytes());

        outputStream.flush();

        outputStream.close();

        fs.close();

        logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPathFlag:{}", destPathFlag);
        return true;
    } catch (Exception e) {
        logger.error("[copyEditStringToHDFS] 编辑内容上传至 HDFS失败,destPathFlag:{}", destPathFlag, e);
        return false;
    }
}

/**
 * 编辑内容上传至 HDFS
 *
 * @param destPathFlag hdfs路径
 */
public static Boolean copyNodeEditStringToHDFS(String destPathFlag, String nodeEditString, String destPath, String nodeId) {
    try {


        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");

        FSDataOutputStream outputStream = null;

        if (destPathFlag.equals("sql")) {
            destPath = destPath + "/" + nodeId + ".sql";
        } else if (destPathFlag.equals("python")) {
            destPath = destPath + "/" + nodeId + ".py";
        } else if (destPathFlag.equals("scala")) {
            destPath = destPath + "/" + nodeId + ".scala";
        }
        if (fs.exists(new Path(destPath))) {
            rmfiledir(destPath);
        }
        outputStream = fs.create(new Path(destPath), true);

        outputStream.write(nodeEditString.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyNodeEditStringToHDFS] NodeEditString上传至HDFS成功,destPath=[{}]", destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFS] NodeEditString上传至HDFS失败,destPath=[{}]", destPath, e);
        return false;
    }

}

/**
 * 编辑内容上传至 HDFS
 *
 * @param destPathFlag hdfs路径
 */
public static String copyNodeEditStringToHDFSNew2(String destPathFlag, String nodeEditString, String destPath, String nodeId) {
    try {


        /*Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");*/
        FileSystem fs = getFileSystem();

        FSDataOutputStream outputStream = null;

        if (destPathFlag.equals("sql")) {
            destPath = destPath + "/" + nodeId + ".sql";
        } else if (destPathFlag.equals("python")) {
            destPath = destPath + "/" + nodeId + ".py";
        } else if (destPathFlag.equals("scala")) {
            destPath = destPath + "/" + nodeId + ".scala";
        }
        if (fs.exists(new Path(destPath))) {
            rmfiledir(destPath);
        }
        outputStream = fs.create(new Path(destPath), true);

        outputStream.write(nodeEditString.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyNodeEditStringToHDFS] NodeEditString上传至HDFS成功,destPath=[{}]", destPath);
        return HDFSUri + destPath;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFS] NodeEditString上传至HDFS失败,destPath=[{}]", destPath, e);
        return null;
    }

}


/**
 * Check whether a path exists on HDFS.
 *
 * @param filePath HDFS path to check
 */
public static Boolean isExistHdfsPath(String filePath) {
    try {
       /* Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");*/
        FileSystem fs = getFileSystem();
        if (!fs.exists(new Path(filePath))) {

            return false;
        }
        logger.info("[isExistHdfsPath] HDFS路径存在,filePath=[{}]", filePath);
        return true;
    } catch (Exception e) {
        logger.error("[isExistHdfsPath] 判断HDFS路径是否存在出现异常,filePath=[{}]", filePath, e);
        return false;
    }

}


/**
 * 编辑内容上传至 HDFS
 *
 * @param destPathFlag hdfs路径
 */
public static String copyNodeEditStringToHDFS2(String destPathFlag, String nodeEditString, String destPath, String nodeId) {
    try {


        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");

        FSDataOutputStream outputStream = null;

        // destPath = destPath + "/sql.xml";
        // destPath = destPath + "/python.xml";
        // destPath = destPath + "/scala.xml";
        if (destPathFlag.equals("hive")) {
            destPath = destPath + "/" + nodeId + ".hive";
        } else if (destPathFlag.equals("shell")) {
            destPath = destPath + "/" + nodeId + ".shell";
        }
        if (fs.exists(new Path(destPath))) {
            rmfiledir(destPath);
        }
        outputStream = fs.create(new Path(destPath), true);

        outputStream.write(nodeEditString.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();
        fs.close();

        logger.info("[copyNodeEditStringToHDFS] NodeEditString上传至HDFS成功,destPath=[{}]", destPath);
        return HDFSUri + destPath;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFS] NodeEditString上传至HDFS失败,destPath=[{}]", destPath, e);
        return null;
    }

}

/**
 * 编辑内容上传至 HDFS
 *
 * @param destPathFlag hdfs路径
 */
public static Boolean copyNodeEditStringToHDFSOld(String destPathFlag, String nodeEditString, String destPath) {
    try {


        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        URI uri = new URI(HDFSUri + "/");  // 连接资源位置
        FileSystem fs = FileSystem.get(uri, config, "hdfs");

        FSDataOutputStream outputStream = null;

        if (destPathFlag.equals("sql")) {
            destPath = destPath + "/sql.xml";
        } else if (destPathFlag.equals("python")) {
            destPath = destPath + "/python.xml";
        } else if (destPathFlag.equals("scala")) {
            destPath = destPath + "/scala.xml";
        }
        if (fs.exists(new Path(destPath))) {
            rmfiledir(destPath);
        }
        outputStream = fs.create(new Path(destPath), true);

        outputStream.write(nodeEditString.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();

        fs.close();
        logger.info("[copyNodeEditStringToHDFSOld] NodeEditString上传至HDFS成功,destPath=[{}]", destPath);
        return true;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFSOld] NodeEditString上传至HDFS失败,destPath=[{}]", destPath, e);
        return false;
    }
}


public static Boolean copyNodeEditStringToHDFSLock(String destPathFlag, String nodeEditString, String destPath, String nodeId) {

    // JedisCluster jedis = JedisClusterClient.INSTANCE.getJedisCluster();
    String setNxkey = "wjslock"; // appId + "_" + groupId + Global.REDIS_LOCK;
    // String setNxkey = "wjslock" + "_" + userId;

    /**
     * 通用缓存
     */
    Cache cache = EhcacheUtil.getCache(Const.GDF);

    int tryCount = 0;
    try {
        while (true) {
            //锁到期时间
            long expires = System.currentTimeMillis() + 10000;
            //失败:0, 成功:1

// Long suo = jedis.setnx(setNxkey, String.valueOf(expires));

            EhcacheUtil.put(cache, EhcacheUtil.newElement(setNxkey, String.valueOf(expires)));
            Object suo = EhcacheUtil.getObjectValue(cache, setNxkey);
            if (null == suo && tryCount < 3) {
                ++tryCount;
                Thread.sleep(100);
                continue;
            }

            copyNodeEditStringToHDFS(destPathFlag, nodeEditString, destPath, nodeId);
            EhcacheUtil.remove(cache, setNxkey);
            // jedis.del(setNxkey);
            break;
        }
        return true;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFSLock] error", e);
        return false;
    }
}
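
// Note on the "lock" above: EhcacheUtil.put immediately followed by EhcacheUtil.getObjectValue
// always succeeds, so no caller ever observes the key as already held and there is no real
// mutual exclusion. If an in-process lock on top of Ehcache 2.x is the intent, a putIfAbsent
// based sketch could look like the hypothetical helper below (runWithEhcacheLock is not part
// of the original utility class; it assumes the net.sf.ehcache Cache/Element API):
public static boolean runWithEhcacheLock(Cache cache, String lockKey, Runnable task) throws InterruptedException {
    for (int attempt = 0; attempt < 3; attempt++) {
        // putIfAbsent returns null only for the caller that actually inserted the element
        Element previous = cache.putIfAbsent(new Element(lockKey, System.currentTimeMillis() + 10000L));
        if (previous == null) {
            try {
                task.run();
                return true;
            } finally {
                cache.remove(lockKey);   // always release the lock entry
            }
        }
        Thread.sleep(100);               // someone else holds it, retry briefly
    }
    return false;                        // could not acquire within three attempts
}
// This only guards threads inside one JVM; the commented-out jedis.setnx / jedis.del calls
// would be the cross-process equivalent.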

public static String copyNodeEditStringToHDFSLockNew2(String destPathFlag, String nodeEditString, String destPath, String nodeId) {

    // JedisCluster jedis = JedisClusterClient.INSTANCE.getJedisCluster();
    String setNxkey = "wjslock"; // appId + "_" + groupId + Global.REDIS_LOCK;
    String hdfsDestPath = null;
    // general-purpose cache
    Cache cache = EhcacheUtil.getCache(Const.GDF);

    int tryCount = 0;
    try {
        while (true) {
            //锁到期时间
            long expires = System.currentTimeMillis() + 10000;
            //失败:0, 成功:1

// Long suo = jedis.setnx(setNxkey, String.valueOf(expires));

            EhcacheUtil.put(cache, EhcacheUtil.newElement(setNxkey, String.valueOf(expires)));
            Object suo = EhcacheUtil.getObjectValue(cache, setNxkey);
            if (null == suo && tryCount < 3) {
                ++tryCount;
                Thread.sleep(100);
                continue;
            }

            hdfsDestPath = copyNodeEditStringToHDFSNew2(destPathFlag, nodeEditString, destPath, nodeId);
            // remove the lock entry from the cache
            EhcacheUtil.remove(cache, setNxkey);
            // jedis.del(setNxkey);
            break;
        }
        return hdfsDestPath;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFSLockNew2] error", e);
        return null;
    }
}

public static String copyNodeEditStringToHDFSLock2(String destPathFlag, String nodeEditString/*, String destPath,String nodeId*/) {

    // JedisCluster jedis = JedisClusterClient.INSTANCE.getJedisCluster();
    String setNxkey = "wjslock2"; // appId + "_" + groupId + Global.REDIS_LOCK;

    long timeStamp = System.currentTimeMillis();
    String nodeId = String.valueOf(timeStamp);

    // String destBasePath = "/home/wjs/oozie-apps/";
    String destBasePath = GlobalDefine.hdfs_code_save;

    if (destPathFlag.equals("hive")) {
        destBasePath = destBasePath + "hive";
    } else if (destPathFlag.equals("shell")) {
        destBasePath = destBasePath + "shell";
    }

    // String destPath = destBasePath + nodeId;
    String destPath = destBasePath;

    if (!HDFSUtil.mkdir(destPath)) {
        logger.error("[copyNodeEditStringToHDFS] hdfs创建目录失败,destPath=[{}]", destPath);

// return new ResultModel(ResultModel.RESULT_FAILURE, “hdfs创建目录失败”, new HashMap<>());
return null;
}

    /**
     * 通用缓存
     */
    Cache cache = EhcacheUtil.getCache(Const.GDF);

// String destPath = null;

    int tryCount = 0;
    try {
        while (true) {
            //锁到期时间
            long expires = System.currentTimeMillis() + 10000;
            //失败:0, 成功:1

// Long suo = jedis.setnx(setNxkey, String.valueOf(expires));

            EhcacheUtil.put(cache, EhcacheUtil.newElement(setNxkey, String.valueOf(expires)));
            Object suo = EhcacheUtil.getObjectValue(cache, setNxkey);
            if (null == suo && tryCount < 3) {
                ++tryCount;
                Thread.sleep(100);
                continue;
            }

            destPath = copyNodeEditStringToHDFS2(destPathFlag, nodeEditString, destPath, nodeId);
            EhcacheUtil.remove(cache, setNxkey);
            // jedis.del(setNxkey);
            break;
        }
        return destPath;
    } catch (Exception e) {
        logger.error("[copyNodeEditStringToHDFSLock2] error", e);
        return null;
    }
}

/**
 * Upload the deleted lines to HDFS.
 *
 * @param srcFile          local source file path
 * @param destPathFlag     type flag ("sql" / "python" / "scala") that picks the target log file
 * @param deleteStringList the deleted lines to write
 */
public static Boolean copydeleteStringToHDFS(String srcFile, String destPathFlag, List<String> deleteStringList) {
    try {
        FileInputStream fis = new FileInputStream(new File(srcFile));//读取本地文件
        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;
        if (destPathFlag == "sql") {

// String destPathBase = “/user/wjs/oozie-apps/”;
String destPathBase = GlobalDefine.hfds_code_log;
destPath = HDFSUri + destPathBase + “sql.log”;
fs = FileSystem.get(URI.create(destPath), config, “hdfs”);
if (!fs.exists(new Path(destPath))) {
// 创建目录
//fs.mkdirs(new Path(path));
outputStream = fs.create(new Path(destPath));
logger.info("[copydeleteStringToHDFS] 删除后内容上传至HDFS成功,destPath=[{}]", destPath);
}
} else if (destPathFlag == “python”) {
// String destPathBase = “/user/wjs/oozie-apps/”;
String destPathBase = GlobalDefine.hfds_code_log;
destPath = HDFSUri + destPathBase + “python.log”;
fs = FileSystem.get(URI.create(destPath), config, “hdfs”);
if (!fs.exists(new Path(destPath))) {
// 创建目录
//fs.mkdirs(new Path(path));
outputStream = fs.create(new Path(destPath));
logger.info("[copydeleteStringToHDFS] 删除后内容上传至 HDFS成功,destPath=[{}]", destPath);
}
} else if (destPathFlag == “scala”) {
// String destPathBase = “/user/wjs/oozie-apps/”;
String destPathBase = GlobalDefine.hfds_code_log;
destPath = HDFSUri + destPathBase + “scala.log”;
fs = FileSystem.get(URI.create(destPath), config, “hdfs”);
if (!fs.exists(new Path(destPath))) {
// 创建目录
//fs.mkdirs(new Path(path));
outputStream = fs.create(new Path(destPath));
logger.info("[copydeleteStringToHDFS] 删除后内容上传至 HDFS成功,destPath=[{}]", destPath);
}
}
/*
Long originRowCount=getRowCountByFile(destPath, fs);
Long addRowCount = (long) editStringList.size();
if (originRowCount + addRowCount > 1000) {
Long deleteCount = originRowCount + addRowCount - 1000;

        }

*/

        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < deleteStringList.size(); i++) {
            sb.append(deleteStringList.get(i)).append("\r\n");
        }
        outputStream.write(sb.toString().getBytes());

        outputStream.flush();

        outputStream.close();

        fs.close();

        logger.info("[copydeleteStringToHDFS] 删除后内容上传至HDFS成功,srcFile:{},destPathFlag:{}", srcFile, destPathFlag);
        return true;
    } catch (Exception e) {
        logger.error("[copydeleteStringToHDFS] 删除后内容上传至HDFS失败,srcFile:{},destPathFlag:{}", srcFile, destPathFlag, e);
        return false;
    }
}
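
// Caveat on copydeleteStringToHDFS: fs.create is only called when the log file does not exist
// yet, so on every later call outputStream stays null and outputStream.write throws a
// NullPointerException; dfs.support.append is set but append is never used. A create-or-append
// sketch of what seems to be the intent (createOrAppend is a hypothetical helper, and it assumes
// the cluster actually allows appends):
private static FSDataOutputStream createOrAppend(FileSystem fs, String destPath) throws IOException {
    Path path = new Path(destPath);
    if (fs.exists(path)) {
        return fs.append(path);   // keep the lines already in the log
    }
    return fs.create(path);       // first call creates the file
}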

public static Long getRowCountByFile(String txtFilePath, FileSystem fs) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;
    long RowCount = 0;
    try {
        //FileSystem fs = FileSystem.get(URI.create(txtFilePath),conf);
        fsr = fs.open(new Path(txtFilePath));
        bufferedReader = new BufferedReader(new InputStreamReader(fsr));
        while ((lineTxt = bufferedReader.readLine()) != null) {
            RowCount++;

/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return RowCount;
}
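
// Usage sketch for getRowCountByFile: the caller owns the FileSystem and must close it
// afterwards. The method name and log path below are only examples taken from the comments above.
public static void printSqlLogRowCount() throws Exception {
    FileSystem fs = getFileSystem();   // helper defined elsewhere in this class
    try {
        Long rows = getRowCountByFile(HDFSUri + "/user/wjs/oozie-apps/sql.log", fs);
        logger.info("sql.log rows = {}", rows);
    } finally {
        fs.close();
    }
}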


public static List<List<String>> getRowTextByFile(String destPathFlag) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;
    List<List<String>> rowTextList = new ArrayList<>();
    long RowCount = 0;


    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "sql.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "python.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "scala.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        }

// FileSystem fs = null;

        //FileSystem fs = FileSystem.get(URI.create(txtFilePath),conf);
        fsr = fs.open(new Path(destPath));
        bufferedReader = new BufferedReader(new InputStreamReader(fsr));
        while ((lineTxt = bufferedReader.readLine()) != null) {
            RowCount++;
            List<String> textList = new ArrayList<>();
            textList.add(lineTxt);

            rowTextList.add(textList);

/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return rowTextList;
}


public static List<List<String>> getRowTextByFile2(String destPathFlag) {


    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;
    List<List<String>> rowTextList = new ArrayList<>();
    long RowCount = 0;

// List textList = new ArrayList<>();

    try {
        /**
         * 通用缓存
         */
        Cache cache = EhcacheUtil.getCache(Const.GDF);
        String setSqlkey = "wjsSqlRunHistory";
        String setPythonkey = "wjsPythonRunHistory";
        String setScalakey = "wjsScalaRunHistory";

        if (destPathFlag.equals("sql")) {
            //注意 此处可能会报空指针 原因 cache(缓存中)没有对应的缓存
            if (EhcacheUtil.getObjectValue(cache, setSqlkey) != null) {
                GlobalDefine.editSqlList = (List<String>) EhcacheUtil.getObjectValue(cache, setSqlkey);
                for (String editSql : GlobalDefine.editSqlList) {
                    List<String> textList = new ArrayList<>();
                    textList.add(editSql);
                    rowTextList.add(textList);
                }
            }
        } else if (destPathFlag.equals("python")) {
            if (EhcacheUtil.getObjectValue(cache, setPythonkey) != null) {
                GlobalDefine.editPythonList = (List<String>) EhcacheUtil.getObjectValue(cache, setPythonkey);
                for (String editPython : GlobalDefine.editPythonList) {
                    List<String> textList = new ArrayList<>();
                    textList.add(editPython);
                    rowTextList.add(textList);
                }
            }
        } else if (destPathFlag.equals("scala")) {
            if (EhcacheUtil.getObjectValue(cache, setScalakey) != null) {
                GlobalDefine.editScalaList = (List<String>) EhcacheUtil.getObjectValue(cache, setScalakey);
                for (String editScala : GlobalDefine.editScalaList) {
                    List<String> textList = new ArrayList<>();
                    textList.add(editScala);
                    rowTextList.add(textList);
                }
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return rowTextList;
}

public static void getRowTextByFile_schedule(/*String destPathFlag */) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;

    // List<List> rowTextList = new ArrayList<>();
    String rowTextList = null;
    long RowCount = 0;

    /**
     * 通用缓存
     */
    Cache cache = EhcacheUtil.getCache(Const.GDF);
    String setSqlkey = "wjsSqlRunHistory";
    String setPythonkey = "wjsPythonRunHistory";
    String setScalakey = "wjsScalaRunHistory";


    String destPathFlag_sql = "sql";
    String destPathFlag_python = "python";
    String destPathFlag_scala = "scala";


    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;

        if (destPathFlag_sql.equals("sql")) {

// String destPathBase = “/user/wjs/oozie-apps/”;
String destPathBase = GlobalDefine.hfds_code_log;
destPath = HDFSUri + destPathBase + “sqlSchedule.log”;
fs = FileSystem.get(URI.create(destPath), config, “hdfs”);
// if (!fs.exists(new Path(destPath))) {
// // 创建目录
// //fs.mkdirs(new Path(path));
// outputStream = fs.create(new Path(destPath));
// logger.info("[copyEditStringToHDFS] 编辑内容上传至 HDFS成功,destPath=[{}]", destPath);
// }
if (HDFSUtil.isExistHdfsPath(destPath)) {
fsr = fs.open(new Path(destPath));
bufferedReader = new BufferedReader(new InputStreamReader(fsr));
while ((lineTxt = bufferedReader.readLine()) != null) {
RowCount++;
// List textList = new ArrayList<>();
// textList.add(lineTxt);

// rowTextList.add(textList);
// if (!StringUtils.isBlank(lineTxt)) {
rowTextList = lineTxt;
// }

/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

                }


                List<String> pList = JSON.parseArray(rowTextList, String.class);
                if (pList.size() != 0) {

// if (destPathFlag_sql.equals(“sql”)) {
GlobalDefine.editSqlList = pList;
// EhcacheUtil.put(cache, EhcacheUtil.newElement(setSqlkey,GlobalDefine.editSqlList));
EhcacheUtil.put(cache, EhcacheUtil.newElement(setSqlkey, pList));
// }
// if (destPathFlag_python.equals(“python”)) {
// EhcacheUtil.put(cache, EhcacheUtil.newElement(setPythonkey, pList));
// }
// if (destPathFlag_scala.equals(“scala”)) {
// EhcacheUtil.put(cache, EhcacheUtil.newElement(setScalakey, pList));
// }
}

                if (bufferedReader != null) {

// try {
bufferedReader.close();
// } catch (IOException e) {
// e.printStackTrace();
// }
}
}

        }
        if (destPathFlag_python.equals("python")) {

            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "pythonSchedule.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");

            if (HDFSUtil.isExistHdfsPath(destPath)) {
                fsr = fs.open(new Path(destPath));
                bufferedReader = new BufferedReader(new InputStreamReader(fsr));
                while ((lineTxt = bufferedReader.readLine()) != null) {
                    RowCount++;

                    rowTextList = lineTxt;   // keep the last line; it is parsed as a JSON array below

                }


                List<String> pList = JSON.parseArray(rowTextList, String.class);
                if (pList.size() != 0) {
                    GlobalDefine.editPythonList = pList;

                    EhcacheUtil.put(cache, EhcacheUtil.newElement(setSqlkey, GlobalDefine.editSqlList));
                    EhcacheUtil.put(cache, EhcacheUtil.newElement(setPythonkey, pList));
                }

                if (bufferedReader != null) {
                    bufferedReader.close();
                }

            }


        }
        if (destPathFlag_scala.equals("scala")) {

            // String destPathBase = "/user/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hfds_code_log;
            destPath = HDFSUri + destPathBase + "scalaSchedule.log";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");

            if (HDFSUtil.isExistHdfsPath(destPath)) {
                fsr = fs.open(new Path(destPath));
                bufferedReader = new BufferedReader(new InputStreamReader(fsr));
                while ((lineTxt = bufferedReader.readLine()) != null) {
                    RowCount++;

                    rowTextList = lineTxt;   // keep the last line; it is parsed as a JSON array below

                }


                List<String> pList = JSON.parseArray(rowTextList, String.class);
                if (pList.size() != 0) {
                    GlobalDefine.editScalaList = pList;

                    EhcacheUtil.put(cache, EhcacheUtil.newElement(setSqlkey, GlobalDefine.editSqlList));
                    EhcacheUtil.put(cache, EhcacheUtil.newElement(setScalakey, pList));
                }

                if (bufferedReader != null) {
                    bufferedReader.close();
                }

            }


        }


    } catch (Exception e) {
        e.printStackTrace();
    } /*finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }*/

// return rowTextList;
}
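
// The three branches in getRowTextByFile_schedule differ only in the log file name, the cache
// key and the GlobalDefine field they refresh. A hedged refactoring sketch that the sql/python/
// scala cases could share (refreshScheduleCache is a hypothetical helper reflecting my reading
// of one branch; it assumes the same HDFSUtil, EhcacheUtil, GlobalDefine and fastjson helpers):
private static List<String> refreshScheduleCache(FileSystem fs, Cache cache, String logName, String cacheKey) throws IOException {
    String destPath = HDFSUri + GlobalDefine.hfds_code_log + logName;
    if (!HDFSUtil.isExistHdfsPath(destPath)) {
        return new ArrayList<>();
    }
    String lastLine = null;
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(new Path(destPath))))) {
        String line;
        while ((line = reader.readLine()) != null) {
            lastLine = line;   // the schedule log is written as a single JSON array line
        }
    }
    List<String> parsed = JSON.parseArray(lastLine, String.class);
    if (parsed != null && !parsed.isEmpty()) {
        EhcacheUtil.put(cache, EhcacheUtil.newElement(cacheKey, parsed));
    }
    return parsed == null ? new ArrayList<>() : parsed;
}
// Each branch then shrinks to something like:
//     GlobalDefine.editSqlList = refreshScheduleCache(fs, cache, "sqlSchedule.log", setSqlkey);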


public static String getNodeRunCodeNew(String destPathFlag, String fileName, String filePathName) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;
    List<String> rowTextList = new ArrayList<>();
    long RowCount = 0;

    String result = null;


    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/home/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/sql/" + filePathName + "/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/python/" + filePathName + "/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/scala/" + filePathName + "/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        }

// FileSystem fs = null;

        //FileSystem fs = FileSystem.get(URI.create(txtFilePath),conf);
        fsr = fs.open(new Path(destPath));
        bufferedReader = new BufferedReader(new InputStreamReader(fsr));
        while ((lineTxt = bufferedReader.readLine()) != null) {


            RowCount++;
            List<String> textList = new ArrayList<>();
            textList.add(lineTxt);
            StringBuilder stringBuilderTextList = new StringBuilder();

            for (String s : textList) {
                stringBuilderTextList.append(s);
            }

            rowTextList.add(stringBuilderTextList.toString());

/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

        }


        StringBuilder stringBuilderRowTextList = new StringBuilder();
        for (String row : rowTextList) {
            stringBuilderRowTextList.append(row);
        }
        result = stringBuilderRowTextList.toString();

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return result;
}
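
// Note: getNodeRunCodeNew concatenates the lines with no separator, so the returned code loses
// its line breaks, and the per-line List/StringBuilder round trip adds nothing. If the original
// formatting matters, a sketch that keeps the newlines (readHdfsFileKeepingNewlines is a
// hypothetical helper; the caller is assumed to have resolved destPath exactly as above):
private static String readHdfsFileKeepingNewlines(FileSystem fs, String destPath) throws IOException {
    StringBuilder content = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(fs.open(new Path(destPath)), "UTF-8"))) {
        String line;
        while ((line = reader.readLine()) != null) {
            content.append(line).append('\n');   // re-insert the newline that readLine() strips
        }
    }
    return content.toString();
}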

public static List<List<String>> getVersionInfo(String destPathFlag, String parentNodeText) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;

    List<List<String>> rowTextList = new ArrayList<>();
    long RowCount = 0;

    String result = null;


    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/home/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/sql/" + parentNodeText + "/" + parentNodeText + ".version";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/python/" + parentNodeText + "/" + parentNodeText + ".version";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/scala/" + parentNodeText + "/" + parentNodeText + ".version";
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        }

// FileSystem fs = null;

        //FileSystem fs = FileSystem.get(URI.create(txtFilePath),conf);
        fsr = fs.open(new Path(destPath));
        bufferedReader = new BufferedReader(new InputStreamReader(fsr));
        while ((lineTxt = bufferedReader.readLine()) != null) {
            RowCount++;
            List<String> textList = new ArrayList<>();
            textList.add(lineTxt);

            rowTextList.add(textList);


/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

        }

// StringBuilder stringBuilderRowTextList = new StringBuilder();
// for (String row : rowTextList) {
// stringBuilderRowTextList.append(row);
// }
// result = stringBuilderRowTextList.toString();

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return rowTextList;
}


public static String getNodeRunCode(String destPathFlag, String fileName) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;
    List<String> rowTextList = new ArrayList<>();
    long RowCount = 0;

    String result = null;


    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/home/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/sql/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/python/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/scala/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        }

// FileSystem fs = null;

        //FileSystem fs = FileSystem.get(URI.create(txtFilePath),conf);
        fsr = fs.open(new Path(destPath));
        bufferedReader = new BufferedReader(new InputStreamReader(fsr));
        while ((lineTxt = bufferedReader.readLine()) != null) {


            RowCount++;
            List<String> textList = new ArrayList<>();
            textList.add(lineTxt);
            StringBuilder stringBuilderTextList = new StringBuilder();

            for (String s : textList) {
                stringBuilderTextList.append(s);
            }

            rowTextList.add(stringBuilderTextList.toString());

/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

        }


        StringBuilder stringBuilderRowTextList = new StringBuilder();
        for (String row : rowTextList) {
            stringBuilderRowTextList.append(row);
        }
        result = stringBuilderRowTextList.toString();

    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return result;
}

public static List<List<String>> getNodeRunCodeOld(String destPathFlag, String fileName) {

    StringBuffer buffer = new StringBuffer();
    FSDataInputStream fsr = null;
    BufferedReader bufferedReader = null;
    String lineTxt = null;
    List<List<String>> rowTextList = new ArrayList<>();
    long RowCount = 0;


    try {

        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        config.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        config.setBoolean("dfs.support.append", true);
        String destPath = null;
        FileSystem fs = null;
        if (destPathFlag.equals("sql")) {
            // String destPathBase = "/home/wjs/oozie-apps/";
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/sql/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("python")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/python/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        } else if (destPathFlag.equals("scala")) {
            String destPathBase = GlobalDefine.hdfs_code_save;
            destPath = HDFSUri + destPathBase + "/scala/" + fileName;
            fs = FileSystem.get(URI.create(destPath), config, "hdfs");
        }

// FileSystem fs = null;

        //FileSystem fs = FileSystem.get(URI.create(txtFilePath),conf);
        fsr = fs.open(new Path(destPath));
        bufferedReader = new BufferedReader(new InputStreamReader(fsr));
        while ((lineTxt = bufferedReader.readLine()) != null) {
            RowCount++;
            List<String> textList = new ArrayList<>();
            textList.add(lineTxt);

            rowTextList.add(textList);

/*
if(lineTxt.split("\t")[0].trim().equals(“00067”)){
return lineTxt;
}
*/

        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    return rowTextList;
}

public static Boolean deleteRowCountByFile(String txtFilePath, FileSystem fs, Long deleteCount, String classPathBase, String destPathFlag) {
    try {
        //String destPath = "delete";
        String classPath = classPathBase + destPathFlag;
        if (!MakeDirectoryUtil.mkDirectory(classPath)) {
            logger.error("[deleteRowCountByFile] 创建文件目录失败,txtFilePath=[{}]", txtFilePath);
            return false;
        }
        getFile(txtFilePath, classPath);
        File classPathFile = new File(classPath);
        List<String> deleteStringList = FileReadAndRemoveUtil.readAndRemoveFirstLines(classPathFile, deleteCount);
        if (!rmfiledir(txtFilePath)) {
            logger.error("[deleteRowCountByFile] 文件删除失败,txtFilePath=[{}]", txtFilePath);
            return false;
        }
        String directoryFile = null;
        if (destPathFlag == "sql") {
            directoryFile = GetDirectoryUtil.getSqlDirectoryFile(classPathBase);
        } else if (destPathFlag == "python") {
            directoryFile = GetDirectoryUtil.getPythonDirectoryFile(classPathBase);
        } else if (destPathFlag == "scala") {
            directoryFile = GetDirectoryUtil.getPythonDirectoryFile(classPathBase);
        }

        if (!copydeleteStringToHDFS(directoryFile, destPathFlag, deleteStringList)) {
            logger.error("[deleteRowCountByFile] 上传删除内容后文件失败,txtFilePath=[{}]", txtFilePath);
            return false;
        }
        DeleteDirectoryUtil.deleteFile(directoryFile);


        logger.info("[deleteRowCountByFile] 上传删除内容后文件成功,txtFilePath=[{}]", txtFilePath);
        return true;
    } catch (Exception e) {
        logger.error("[deleteRowCountByFile] 上传删除内容后文件失败,txtFilePath=[{}]", txtFilePath, e);
        return false;
    }
}


/**
 * 从HDFS下载文件到本地
 *
 * @param srcFile  HDFS文件路径
 * @param destPath 本地路径
 */
public static Boolean getFile(String srcFile, String destPath) {
    try {
        //hdfs文件 地址
        String file = HDFSUri + srcFile;
        Configuration config = new Configuration();
        config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        //构建FileSystem
        FileSystem fs = FileSystem.get(URI.create(file), config, "hdfs");
        //读取文件
        InputStream is = fs.open(new Path(file));
        //保存到本地  最后 关闭输入输出流
        IOUtils.copyBytes(is, new FileOutputStream(new File(destPath)), 2048, true);
        fs.close();
        logger.info("[getFile] 从HDFS下载文件到本地成功,srcFile:[{}],destPath:[{}]", srcFile, destPath);
        return true;
    } catch (Exception e) {
        logger.error("[getFile] 从HDFS下载文件到本地失败,srcFile:[{}],destPath:[{}]", srcFile, destPath, e);
        return false;
    }
}
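
// Usage sketch for getFile: the HDFS path and local path below are examples only, and the
// wrapper method is hypothetical.
public static void downloadSqlLogExample() {
    if (!getFile("/user/wjs/oozie-apps/sql.log", "/tmp/sql.log")) {
        logger.warn("[getFile] download failed, check the HDFS path and permissions");
    }
}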

/**
 * 删除文件或者文件目录
 *
 * @param path
 */
public static Boolean rmfiledir(String path) {
    try {
        // 返回FileSystem对象
        FileSystem fs = getFileSystem();
        String hdfsUri = HDFSUri;
        if (StringUtils.isNotBlank(hdfsUri)) {
            path = hdfsUri + path;
        }
        if (fs.delete(new Path(path), true)) { // 删除文件
            fs.close();// 释放资源
            logger.info("[rmfiledir] 文件删除成功,path=[{}]", path);
            return true;
        } else {
            logger.info("[rmfiledir] 文件删除失败,path=[{}]", path);
            return false;
        }
    } catch (Exception e) {
        logger.error("[rmfiledir] 文件删除失败,path=[{}]", path, e);
        return false;
    }
}

/**
 * 读取文件的内容
 *
 * @param filePath
 * @throws IOException
 */
public static void readFile(String filePath) throws Exception {
    Configuration config = new Configuration();
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    String file = HDFSUri + filePath;
    FileSystem fs = FileSystem.get(URI.create(file), config, "hdfs");
    //读取文件
    InputStream is = fs.open(new Path(file));
    //读取文件
    IOUtils.copyBytes(is, System.out, 2048, false); //复制到标准输出流
    fs.close();
}

/**
 * 删除文件
 *
 * @param filePath
 * @throws IOException
 */
public static void deleteFile(String filePath) throws Exception {
    Configuration config = new Configuration();
    config.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    String file = HDFSUri + filePath;
    FileSystem fs = FileSystem.get(URI.create(file), config, "hdfs");
    if (fs.delete(new Path(file), false)) { // 删除文件
        logger.info("[deleteFile] 文件删除成功,file=[{}]", file);
    } else {
        logger.info("[deleteFile] 文件不存在,file=[{}]", file);
    }
    fs.close();
}


/**
 * 获取指定目录下的所有文件
 *
 * @author
 */
public static List<FileInfo> getDirectoryFromHdfs(String direPath) {

    try {
        Configuration conf = new Configuration();
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        String filePath = HDFSUri + direPath;
        FileSystem fs = FileSystem.get(URI.create(filePath), conf, "hdfs");
        FileStatus[] filelist = fs.listStatus(new Path(filePath));
        List<FileInfo> fileInfoList = new ArrayList<>();
        for (int i = 0; i < filelist.length; i++) {
            FileStatus file = filelist[i];
            String path = file.getPath().toString();//文件路径
            Boolean isDirectory = file.isDirectory();
            long modification_time = file.getModificationTime();
            long accessTime = file.getAccessTime();  //该文件上次访问时间
            String owner = file.getOwner();          // 文件拥有者
            String group = file.getGroup();         //文件所属组
            String permission = file.getPermission().toString();
            long length = 0L;
            short replication = 0; //文件副本数
            long blockSize = 0L;   //文件块大小
            if (file.isFile()) {
                length = file.getLen();
                replication = file.getReplication(); //文件副本数
                blockSize = file.getBlockSize();   //文件块大小
            }
            FileInfo fileInfo = new FileInfo();
            fileInfo.setPath(path);
            fileInfo.setDirectory(isDirectory);
            fileInfo.setModification_time(modification_time);
            fileInfo.setAccessTime(accessTime);
            fileInfo.setOwner(owner);
            fileInfo.setGroup(group);
            fileInfo.setPermission(permission);
            fileInfo.setLength(length);
            fileInfo.setReplication(replication);
            fileInfo.setBlockSize(blockSize);
            fileInfoList.add(fileInfo);
        }
        fs.close();
        logger.info("[getDirectoryFromHdfs] 获取指定目录下的所有文件,direPath=[{}],fileInfoList=[{}]", direPath, fileInfoList);
        return fileInfoList;
    } catch (Exception e) {
        logger.error("[getDirectoryFromHdfs] 获取指定目录下的所有文件失败,direPath=[{}]", direPath, e);
        return null;
    }
}
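
// Usage sketch for getDirectoryFromHdfs; the FileInfo getters are assumed to mirror the
// setters used above (getPath/getDirectory/getLength), and the directory is an example path.
public static void listOozieAppsExample() {
    List<FileInfo> files = getDirectoryFromHdfs("/user/wjs/oozie-apps/");
    if (files != null) {
        for (FileInfo info : files) {
            logger.info("path={}, dir={}, length={}", info.getPath(), info.getDirectory(), info.getLength());
        }
    }
}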

public static Boolean renameFile(String src, String dst) {
    Configuration conf = new Configuration();
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    FileSystem fs = getFileSystem();
    try {

        Path srcPath = new Path(src);
        Path dstPath = new Path(dst);
        fs.rename(srcPath, dstPath);
    } catch (Exception ex) {
        logger.error("[renameFile] 重命名文件失败,src=[{}],dst=[{}]", src, dst, ex);
        return false;
    } finally {
        if (fs != null) {
            try {
                fs.close();
            } catch (IOException e) {
                logger.error("[renameFile] 重命名文件失败,src=[{}],dst=[{}]", src, dst, e);
                return false;
            }
        }
    }
    return true;
}

public static void main(String[] args) throws Exception {

    /*
    String HDFSFile = "/hollycrm/data01/codecs/1月.zip";
    String localFile = "D:\\1月.zip";
    // connect to the FileSystem
    FileSystem fs = getFileSystem();
    System.out.println(fs.getUsed());
    // create a directory
    mkdir("/zhaojy2");
    // check whether it exists
    System.out.println(existDir("/zhaojy2", false));

    // upload a file to HDFS
    mkdir("/wjs");
    System.out.println(existDir("/wjs", false));
    // copyFileToHDFS("E:\\HDFSTest.txt", "/zhaojy/HDFSTest.txt");
    copyFileToHDFS("E:\\HDFSTest.txt", "/wjs/HDFSTest.txt");
    */

    // download a file to the local disk
    // getFile("/zhaojy/HDFSTest.txt", "D:\\HDFSTest.txt");
    // getFile("/job.properties", "E:\\job.properties");
    // getFile(HDFSFile, localFile);
    // delete a directory
    // rmdir("/zhaojy2");
    // read a file
    // readFile("/zhaojy/HDFSTest.txt");
}

}
