package com.dragonsoft.cicada.datacenter.modules;
import java.io.*;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import com.code.common.utils.StringUtils;
import com.csvreader.CsvReader;
import com.csvreader.CsvWriter;
import com.dragoninfo.dfw.bean.Result;
import com.fw.service.BaseService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.http.HttpServletResponse;
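/**
 * Test controller that exercises basic HDFS operations (upload, download, merge, rename,
 * mkdir, directory listing) together with a few Excel/CSV parsing helpers. Most endpoints
 * print to stdout and swallow exceptions, so the class is meant for manual verification
 * rather than production use.
 */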
@Controller
@CrossOrigin
@RequestMapping("/HdfsTest")
@Api(value = "HdfsTest|HDFS测试")
public class HdfsTest extends BaseService {
@Value("${hdfs.defaultFS}")
private String hdfsUrl;
@Value("${hdfs.hadoopLoginName:root}")
private String hadoopLoginName;
private static Configuration conf = new Configuration();
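/**
 * Points the shared Configuration at the configured NameNode (hdfs.defaultFS) and echoes
 * the URL back; prints a notice when the property is blank.
 */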
@ResponseBody
@RequestMapping("/initConfig")
@ApiOperation(value = "初始化配置")
public Result initConfig()throws IOException{
if(StringUtils.isBlank(hdfsUrl)){
System.out.println("----------hdfsUrl为空-------");
}
conf.set("fs.defaultFS", hdfsUrl);
return Result.success(hdfsUrl);
}
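/**
 * Writes the given bytes to a new HDFS file. Uses a fresh default Configuration, so the
 * target cluster must come from core-site.xml on the classpath (otherwise the local file
 * system is used).
 */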
public static void createFile(String dst, byte[] contents) throws IOException {
Configuration conf = new Configuration();
// Close the stream and the FileSystem even if the write fails.
try (FileSystem fs = FileSystem.get(conf);
FSDataOutputStream outputStream = fs.create(new Path(dst))) {
outputStream.write(contents);
}
System.out.println("文件创建成功!");
}
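/**
 * Applies the configured NameNode address and forces the DistributedFileSystem
 * implementation on the shared Configuration before each HDFS call.
 */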
private void init(){
conf.set("fs.defaultFS", hdfsUrl);
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
}
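/**
 * Copies a local file (src) into HDFS (dst) and lists the contents of the destination directory.
 */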
@ResponseBody
@RequestMapping("/uploadFile")
@ApiOperation(value = "上传本地文件")
public Result uploadFile(String src, String dst) throws IOException {
try {
init();
try (FileSystem fs = FileSystem.get(conf)) {
Path srcPath = new Path(src);
Path dstPath = new Path(dst);
fs.copyFromLocalFile(false, srcPath, dstPath);
// "fs.default.name" is the deprecated key; read the current one.
System.out.println("Upload to " + conf.get("fs.defaultFS"));
System.out.println("------------list files------------" + "\n");
FileStatus[] fileStatus = fs.listStatus(dstPath);
for (FileStatus file : fileStatus) {
System.out.println(file.getPath());
}
}
} catch (Exception e) {
e.printStackTrace();
}
return Result.success();
}
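/**
 * Streams an uploaded MultipartFile into a fixed HDFS test directory (/user/iof/DatasourceTest/).
 */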
@ResponseBody
@PostMapping("/uploadFile2")
@ApiOperation(value = "上传本地文件(带文件的方式)")
public Result uploadFile2(@RequestParam("file") MultipartFile file, String bean) {
try {
init();
try (FileSystem fs = FileSystem.get(conf)) {
InputStream in = new BufferedInputStream(file.getInputStream());
String filename = file.getOriginalFilename();
FSDataOutputStream out = fs.create(new Path("/user/iof/DatasourceTest/" + filename));
// copyBytes closes both streams when called with a Configuration.
IOUtils.copyBytes(in, out, conf);
}
} catch (Exception e) {
e.printStackTrace();
}
return Result.success();
}
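/**
 * Streams the file at hdfsPath back to the client as an attachment; localPath is currently unused.
 */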
@ResponseBody
@RequestMapping("/downloadFile")
@ApiOperation(value = "下载文件")
public Result downloadFile(HttpServletResponse response, String hdfsPath, String localPath) {
try {
init();
try (FileSystem fs = FileSystem.get(conf);
InputStream in = fs.open(new Path(hdfsPath))) {
// Use the real file name from the HDFS path and URL-encode it so non-ASCII names survive the header.
String fileName = new Path(hdfsPath).getName();
response.setContentType("application/x-msDownload;charset=utf-8");
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(fileName, "UTF-8"));
try (BufferedInputStream bis = new BufferedInputStream(in);
BufferedOutputStream bos = new BufferedOutputStream(response.getOutputStream())) {
byte[] buff = new byte[2048];
int bytesRead;
while (-1 != (bytesRead = bis.read(buff, 0, buff.length))) {
bos.write(buff, 0, bytesRead);
}
bos.flush();
}
}
} catch (Exception e) {
e.printStackTrace();
}
return Result.success();
}
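/**
 * Returns the sheet names of the .xlsx workbook at the given local path.
 */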
@ResponseBody
@RequestMapping("/readSheet")
@ApiOperation(value = "获取excel的sheet页")
public Result readSheet(String path) throws Exception {
List<String> sheetnames = new ArrayList<>();
// Read the workbook from the given path instead of a hard-coded local file.
try (XSSFWorkbook workbook = new XSSFWorkbook(new FileInputStream(path))) {
for (int i = 0; i < workbook.getNumberOfSheets(); i++) {
sheetnames.add(workbook.getSheetAt(i).getSheetName());
}
}
return Result.success(sheetnames);
}
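/**
 * Prints the header cells of the first sheet of an uploaded .xls/.xlsx file; when userHeader
 * is false, synthetic names (column0, column1, ...) are generated instead.
 */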
@ResponseBody
@RequestMapping("/getExcelColumn")
@ApiOperation(value = "获取excel每一列的字段")
public Result getExcelColumn(@RequestParam("file") MultipartFile file) throws Exception {
int sheetNum = 0;
boolean userHeader = true;
int startRow = 1;
int startColumn = 1;
// Derive the extension from the uploaded file name instead of hard-coding it.
String filename = file.getOriginalFilename();
String fileHouZhui = filename == null ? "" : filename.substring(filename.lastIndexOf('.') + 1).toLowerCase();
Workbook wb;
if ("xls".equals(fileHouZhui)) {
wb = new HSSFWorkbook(new FileInputStream(zhuanhuan(file)));
} else if ("xlsx".equals(fileHouZhui)) {
wb = new XSSFWorkbook(new FileInputStream(zhuanhuan(file)));
} else {
System.out.println("文件格式错误!");
return Result.success();
}
try {
Sheet sheet = wb.getSheetAt(sheetNum);
int num = sheet.getRow(0).getPhysicalNumberOfCells();
int columnNo = 0;
for (int i = startColumn - 1; i < num; i++) {
String header = userHeader
? sheet.getRow(startRow - 1).getCell(i).toString()
: "column" + columnNo++;
System.out.println("header" + i + ": " + header);
}
} finally {
wb.close();
}
return Result.success();
}
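/**
 * Prints the cells of the first data row (row index 1) of the given sheet so their types can be inspected.
 */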
@ResponseBody
@RequestMapping("/getExcelColumnType")
@ApiOperation(value = "获取excel第一行推断字段类型")
public Result getExcelColumnType(int sheetNum, String path) throws Exception {
try (XSSFWorkbook workbook = new XSSFWorkbook(new FileInputStream(path))) {
XSSFSheet sheet = workbook.getSheetAt(sheetNum);
// Row index 1 is the first data row (row 0 is assumed to be the header).
XSSFRow row = sheet.getRow(1);
for (int i = 0; i < row.getLastCellNum(); i++) {
System.out.println(i + "-------" + row.getCell(i));
}
}
return Result.success();
}
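/**
 * Prints the column names from the first line of an uploaded CSV file (assumed to be GB2312-encoded).
 */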
@ResponseBody
@RequestMapping("/getCsvColumn")
@ApiOperation(value = "获取Csv每一列的字段")
public Result getCsvColumn(@RequestParam("file") MultipartFile file) throws Exception {
try (BufferedReader fileReader = new BufferedReader(new InputStreamReader(new FileInputStream(zhuanhuan(file)), "GB2312"))) {
String record = fileReader.readLine();
// Guard against an empty file before splitting the header line.
if (record != null) {
String[] cells = record.split(",");
for (String cell : cells) {
System.out.println(cell);
}
}
} catch (Exception e) {
e.printStackTrace();
}
return Result.success();
}
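/**
 * Materializes an uploaded MultipartFile as a File named after the original filename.
 * Note that transferTo with a relative File may be resolved against the servlet container's
 * temporary upload directory, depending on the MultipartFile implementation.
 */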
private File zhuanhuan(MultipartFile file) throws Exception{
File finalFile = new File(file.getOriginalFilename());
file.transferTo(finalFile);
return finalFile;
}
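/**
 * Alternative MultipartFile-to-File conversion that copies the upload stream manually;
 * returns null for an empty upload.
 */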
private File multipartFileToFile(MultipartFile file) throws Exception {
// The original compared the MultipartFile to "" (always false); check for an empty upload instead.
if (file == null || file.isEmpty()) {
return null;
}
File toFile = new File(file.getOriginalFilename());
try (InputStream ins = file.getInputStream()) {
inputStreamToFile(ins, toFile);
}
return toFile;
}
private void inputStreamToFile(InputStream ins, File file) {
// Copies the stream to the target file; the caller is responsible for closing the input stream.
try (OutputStream os = new FileOutputStream(file)) {
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = ins.read(buffer, 0, buffer.length)) != -1) {
os.write(buffer, 0, bytesRead);
}
} catch (Exception e) {
e.printStackTrace();
}
}
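/**
 * Concatenates all .csv files under inputPath into a single file at outPath, byte for byte,
 * echoing the copied content to stdout.
 */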
@ResponseBody
@RequestMapping("/mergeFile")
@ApiOperation(value = "文件合并")
public Result mergeFile(String inputPath, String outPath) throws Exception {
init();
FileSystem fsSource = FileSystem.get(URI.create(inputPath), conf);
FileSystem fsDst = FileSystem.get(URI.create(outPath), conf);
// Only the *.csv files under the input directory are merged.
FileStatus[] sourceStatus = fsSource.listStatus(new Path(inputPath), new MyPathFilter(".*\\.csv"));
try (FSDataOutputStream fsdos = fsDst.create(new Path(outPath))) {
for (FileStatus sta : sourceStatus) {
System.out.println("Path: " + sta.getPath() + " FileSize: " + sta.getLen() + " Limit: " + sta.getPermission() + " Content: ");
try (FSDataInputStream fsdis = fsSource.open(sta.getPath())) {
byte[] data = new byte[1024];
int read;
while ((read = fsdis.read(data)) > 0) {
// Echo directly to System.out; the original wrapped it in a PrintStream and closed it,
// which also closed the JVM's standard output.
System.out.write(data, 0, read);
fsdos.write(data, 0, read);
}
System.out.flush();
}
}
}
return Result.success();
}
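/**
 * Concatenates all .csv files under inputPath into outPath line by line, keeping only the
 * header row of the first file.
 */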
@ResponseBody
@RequestMapping("/mergeFile2")
@ApiOperation(value = "文件合并2")
public Result mergeFile2(String inputPath, String outPath) throws Exception {
init();
FileSystem fsSource = FileSystem.get(URI.create(inputPath), conf);
FileSystem fsDst = FileSystem.get(URI.create(outPath), conf);
FileStatus[] sourceStatus = fsSource.listStatus(new Path(inputPath), new MyPathFilter(".*\\.csv"));
int count = 0;
try (FSDataOutputStream fsdos = fsDst.create(new Path(outPath))) {
for (FileStatus sta : sourceStatus) {
System.out.println("Path: " + sta.getPath() + " FileSize: " + sta.getLen() + " Limit: " + sta.getPermission());
try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fsSource.open(sta.getPath()), StandardCharsets.UTF_8))) {
// Keep the header row of the first file only; skip it for every later file.
if (count != 0) {
bufferedReader.readLine();
}
count++;
String content;
while ((content = bufferedReader.readLine()) != null) {
byte[] bytes = (content + "\n").getBytes(StandardCharsets.UTF_8);
fsdos.write(bytes, 0, bytes.length);
System.out.println("-------" + content);
}
}
}
}
return Result.success();
}
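/**
 * Local CSV rewrite demo: skips the header record and prefixes column index 11 of every data
 * record with 'V'. The input/output paths and the column index are hard-coded sample values.
 */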
@ResponseBody
@RequestMapping("/mergeFile3")
@ApiOperation(value = "文件合并3")
public Result mergeFile3() throws Exception{
try {
String[] stringList;
String csvFilePath = "C:\\Users\\Administrator\\Desktop\\20140227135936.csv";
String sourceFileString= "C:\\Users\\Administrator\\Desktop\\test.csv";
CsvReader reader = new CsvReader(csvFilePath);
CsvWriter writer = new CsvWriter(sourceFileString);
reader.readRecord();
while(reader.readRecord()){
stringList = reader.getValues();
stringList[11] = 'V' + stringList[11];
writer.writeRecord(stringList);
}
reader.close();
writer.close();
}catch(Exception ex) {
ex.printStackTrace();
}
return Result.success();
}
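/**
 * CSV variant of the download endpoint; streams the file at hdfsPath to the client as an attachment.
 */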
@ResponseBody
@RequestMapping("/downloadFileCsv")
@ApiOperation(value = "下载文件CSV")
public Result downloadFileCsv(HttpServletResponse response, String hdfsPath, String localPath) {
try {
init();
try (FileSystem fs = FileSystem.get(conf);
InputStream in = fs.open(new Path(hdfsPath))) {
// Use the real file name from the HDFS path; the original hard-coded an .xlsx name for a CSV download.
String fileName = new Path(hdfsPath).getName();
response.setContentType("application/x-msDownload;charset=utf-8");
response.setCharacterEncoding("UTF-8");
response.setHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(fileName, "UTF-8"));
try (BufferedInputStream bis = new BufferedInputStream(in);
BufferedOutputStream bos = new BufferedOutputStream(response.getOutputStream())) {
byte[] buff = new byte[2048];
int bytesRead;
while (-1 != (bytesRead = bis.read(buff, 0, buff.length))) {
bos.write(buff, 0, bytesRead);
}
bos.flush();
}
}
} catch (Exception e) {
e.printStackTrace();
}
return Result.success();
}
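/**
 * Renames (moves) a path inside HDFS and reports whether the operation succeeded.
 */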
@ResponseBody
@RequestMapping("/rename")
@ApiOperation(value = "文件重命名")
public Result rename(String oldName, String newName) throws IOException {
init();
try (FileSystem fs = FileSystem.get(conf)) {
boolean isok = fs.rename(new Path(oldName), new Path(newName));
System.out.println(isok ? "rename ok!" : "rename failure");
return Result.success(isok);
}
}
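/**
 * Deletes the given HDFS path. Uses a fresh default Configuration, so the target cluster
 * must come from core-site.xml on the classpath.
 */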
public static void delete3(String filePath) throws IOException {
Configuration conf = new Configuration();
try (FileSystem fs = FileSystem.get(conf)) {
// delete(path, true) removes the path (recursively) right away; the original deleteOnExit
// only deleted it when the FileSystem was closed.
boolean isok = fs.delete(new Path(filePath), true);
System.out.println(isok ? "delete ok!" : "delete failure");
}
}
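/**
 * Creates the given directory (including missing parents) in HDFS.
 */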
@ResponseBody
@RequestMapping("/mkdir")
@ApiOperation(value = "创建目录")
public Result mkdir(String path) throws IOException {
init();
try (FileSystem fs = FileSystem.get(conf)) {
boolean isok = fs.mkdirs(new Path(path));
System.out.println(isok ? "create " + path + " dir ok!" : "create " + path + " dir failure");
return Result.success(isok);
}
}
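/**
 * Dumps the content of an HDFS file to stdout. Uses a fresh default Configuration, so the
 * target cluster must come from core-site.xml on the classpath.
 */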
public static void readFile(String filePath) throws IOException{
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
Path srcPath = new Path(filePath);
InputStream in = null;
try {
in = fs.open(srcPath);
IOUtils.copyBytes(in, System.out, 4096, false);
} finally {
IOUtils.closeStream(in);
}
}
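/**
 * Lists the name, size and full path of every entry directly under direPath.
 */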
@ResponseBody
@RequestMapping("/getDirectoryFromHdfs")
@ApiOperation(value = "遍历指定目录(direPath)下的所有文件")
public Result getDirectoryFromHdfs(String direPath) {
List<String> paths = new ArrayList<>();
try {
init();
try (FileSystem fs = FileSystem.get(URI.create(direPath), conf)) {
FileStatus[] filelist = fs.listStatus(new Path(direPath));
// Print the heading once, not on every iteration.
System.out.println("_________" + direPath + "目录下所有文件______________");
for (FileStatus fileStatus : filelist) {
System.out.println("Name:" + fileStatus.getPath().getName());
System.out.println("Size:" + fileStatus.getLen());
System.out.println("Path:" + fileStatus.getPath());
paths.add(fileStatus.getPath().toString());
}
}
} catch (Exception e) {
e.printStackTrace();
}
return Result.success(paths);
}
public static void main(String[] args) {
System.out.println("IV属于第几列 ------" + excelColStrToNum("IV"));
}
/**
 * Converts an Excel column label (e.g. "A", "AB", "IV") to its 1-based column number.
 */
public static int excelColStrToNum(String column) {
// Normalize to upper case so lower-case labels also produce correct results.
column = column.toUpperCase();
int result = 0;
int length = column.length();
for (int i = 0; i < length; i++) {
char ch = column.charAt(length - i - 1);
int num = ch - 'A' + 1;
num *= (int) Math.pow(26, i);
result += num;
}
return result;
}
}
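// ===== MyPathFilter.java: regex-based PathFilter used by the merge endpoints to match *.csv files; kept as a separate source file in the same package =====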
package com.dragonsoft.cicada.datacenter.modules;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
public class MyPathFilter implements PathFilter {
private final String reg;
public MyPathFilter(String reg) {
this.reg = reg;
}
@Override
public boolean accept(Path path) {
// Accept the path when its full string form matches the configured regular expression.
return path.toString().matches(reg);
}
}