// Resolve today's date partition; used for both the local download dir and the HDFS source path.
String currentDay = dateTimeUtil.currentDate();
// Ensure the local download directory for the current date exists.
fsUtil.checkDirExists(new File(configUtil.readConfig("download.file.path") + currentDay));
// Local target CSV file named after the job ID.
String fileName = configUtil.readConfig("download.file.path") + currentDay + "/" + jobID + ".csv";
File file = new File(fileName);
// Each entry holds the column values of one data row.
List<List<String>> dataList = new ArrayList<>();
try {
    String dataPath = configUtil.readConfig("offLine.data.path") + currentDay + "/" + jobID + ".csv/";
    LOG.info(dataPath);
    Path path = new Path(dataPath);
    Configuration configuration = dfsUtil.getHadoopConf();
    FileSystem hdfs = FileSystem.get(configuration);
    if (hdfs.exists(path) && hdfs.isDirectory(path)) {
        for (FileStatus fs : hdfs.listStatus(path)) {
            if (fs.getLen() <= 0) {
                continue; // skip empty part files
            }
            // try-with-resources closes EVERY part-file stream (the original reassigned a
            // single `in` and only closed the last one in `finally`). Decode with the
            // configured charset instead of the deprecated, charset-unaware
            // DataInputStream.readLine() plus getBytes() round-trips.
            try (java.io.BufferedReader reader = new java.io.BufferedReader(
                    new java.io.InputStreamReader(hdfs.open(fs.getPath()), Constant.CODE_FORMAT))) {
                String line;
                while ((line = reader.readLine()) != null) {
                    LOG.info(line);
                    dataList.add(Lists.newArrayList(line));
                }
            }
        }
    }
} catch (Exception error) {
    // Log message AND stack trace; the original discarded getMessage() and used
    // printStackTrace(), which bypasses the logging framework.
    LOG.error("Failed to read offline data for job " + jobID, error);
}
// Flatten each row into one tab-separated line for the CSV export.
List<String> result = new ArrayList<>(dataList.size());
for (List<String> row : dataList) {
    result.add(String.join("\t", row));
}
csvUtil.exportCsv(file, result);