Integrating Spring Boot with HDFS: file upload, download, single delete and batch delete, with a Vue frontend issuing the requests for full front-to-back interaction.

Parts of the utility class are adapted from: https://blog.csdn.net/qq_27242695/article/details/119683823

Frontend result (screenshot omitted)

FileController (HDFS controller)

package com.jack.graduation.controller;

import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.crypto.SecureUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.jack.graduation.bean.FileInfo;
import com.jack.graduation.common.Constants;
import com.jack.graduation.common.Result;
import com.jack.graduation.service.FileService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.HashSet;
import java.util.List;

/**
 * @BelongsProject: graduation
 * @BelongsPackage: com.jack.graduation.controller
 * @Author: jack
 * @CreateTime: 2023-01-05  17:27
 * @Description: file upload/download/delete endpoints
 * @Version: jdk1.8
 */

@RestController
@RequestMapping("/file")
public class FileController {
    @Autowired
    private FileService fileService;

    @PostMapping("/uploadToHdfs")
    public Result uploadToHdfs(@RequestParam MultipartFile file) throws Exception {
        String originalFilename = file.getOriginalFilename(); // original file name
        String type = FileUtil.extName(originalFilename); // file extension, without the leading "."
        if (!"csv".equals(type)) {
            return Result.error(Constants.CODE_400, "The file must be a comma-separated .csv file!");
        }
        // file size in bytes
        long size = file.getSize();
        // unique identifier for the stored file
        String uuid = IdUtil.fastSimpleUUID();
        // new file name: <uuid>.<extension>
        String newOriginalFilename = uuid + StrUtil.DOT + type;
        String md5 = SecureUtil.md5(file.getInputStream());
        // download URL served by this controller
        String url = "http://localhost:9090/file/" + newOriginalFilename;
        FileInfo fileInfo = new FileInfo(null, originalFilename, md5, uuid, type, size / 1024, url, null, null, null, null); // size stored in KB
        // persist the metadata first (note: the record stays even if the HDFS write below fails)
        fileService.save(fileInfo);
        // store the file in HDFS
        boolean res = fileService.uploadHdfs(file, newOriginalFilename);
        if (res) {
            return Result.success("File uploaded successfully!");
        } else {
            return Result.error(Constants.CODE_500, "Server error!");
        }
    }

    /**
     * Download a file (raw or cleaned) from HDFS.
     *
     * @param newFileName unique file name (<uuid>.<extension>)
     * @param isEtl       cleaning flag (0 = raw, 1 = cleaned)
     * @param response    HTTP response
     */
    @GetMapping("/{newFileName}/{isEtl}")
    public void downloadFile(@PathVariable String newFileName, @PathVariable Integer isEtl, HttpServletResponse response) {

        ServletOutputStream os = null;
        try {
            // set the response headers before writing the body
            os = response.getOutputStream();
            response.addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(newFileName, "UTF-8"));
            response.setContentType("application/octet-stream");
            // read the file bytes from HDFS and write them to the response
            byte[] resBytes = fileService.downloadHdfsFile(newFileName, isEtl);
            os.write(resBytes);
            os.flush();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (os != null) {
                try {
                    os.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    @DeleteMapping("/deleteFile/{id}")
    public Result deleteFile(@PathVariable Integer id) {
        QueryWrapper<FileInfo> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("id", id);
        FileInfo fileInfo = fileService.getOne(queryWrapper);
        // remove the file from HDFS first, then the database record
        if (fileService.removeHdfsFile(fileInfo) && fileService.removeById(id)) {
            return Result.success("File deleted successfully");
        } else {
            return Result.error(Constants.CODE_500, "Failed to delete file from HDFS");
        }
    }

    // batch delete files by id
    @PostMapping("/delFileBatch")
    public Result delFileBatch(@RequestBody List<Integer> ids) {
        QueryWrapper<FileInfo> queryWrapper = new QueryWrapper<>();
        queryWrapper.in("id", ids);
        List<FileInfo> fileInfoList = fileService.list(queryWrapper);
        // resSet collects the names of files that failed to delete from HDFS
        HashSet<String> resSet = fileService.removeHdfsFileBatch(fileInfoList);
        if (resSet.isEmpty() && fileService.removeByIds(ids)) {
            return Result.success("Batch delete succeeded");
        } else {
            return Result.error(Constants.CODE_500, resSet.toString());
        }
    }

    // look up a file record by MD5, e.g. to detect duplicate uploads
    public FileInfo getFileByMd5(String md5) {
        QueryWrapper<FileInfo> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("file_md5", md5);
        return fileService.getOne(queryWrapper);
    }

    // paged query with optional filters
    @RequestMapping("/page")
    public Result getPage(@RequestParam Integer pageNum,
                          @RequestParam Integer pageSize,
                          @RequestParam(defaultValue = "") String fileName,
                          @RequestParam(defaultValue = "") String id,
                          @RequestParam(defaultValue = "") String uuid
    ) {
        IPage<FileInfo> page = new Page<>(pageNum, pageSize);
        QueryWrapper<FileInfo> wrapper = new QueryWrapper<>();

        // filter by file name
        if (!"".equals(fileName)) {
            wrapper.eq("file_name", fileName);
        }
        // filter by id
        if (!"".equals(id)) {
            wrapper.and(wra -> wra.eq("id", Integer.valueOf(id)));
        }
        // filter by uuid
        if (!"".equals(uuid)) {
            wrapper.eq("uuid", uuid);
        }
        // newest first
        wrapper.orderByDesc("id");
        IPage<FileInfo> iPage = fileService.page(page, wrapper);
        return Result.success(iPage);
    }
}
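Note that getFileByMd5 above is never called from uploadToHdfs, so identical files are currently uploaded and stored twice. A sketch of how the check could be wired in right after md5 is computed; reusing the stored download URL is one possible policy, and a getUrl() getter on FileInfo is assumed (its url field is set in the constructor above):

        // sketch: short-circuit the upload when the same content already exists
        FileInfo existing = getFileByMd5(md5);
        if (existing != null) {
            // skip the HDFS write and reuse the existing record's download URL
            return Result.success(existing.getUrl());
        }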

FileService (HDFS service interface)

package com.jack.graduation.service;

import com.baomidou.mybatisplus.extension.service.IService;
import com.jack.graduation.bean.FileInfo;
import org.springframework.web.multipart.MultipartFile;

import java.util.HashSet;
import java.util.List;

public interface FileService extends IService<FileInfo> {
    // upload a multipart file to HDFS under the given name
    boolean uploadHdfs(MultipartFile file, String fileName);

    // delete a single file from HDFS
    boolean removeHdfsFile(FileInfo fileInfo);

    // read a file from HDFS into a byte array (isEtl selects the raw or cleaned path)
    byte[] downloadHdfsFile(String fileUUID, Integer isEtl);

    // delete a batch of files; returns the names that failed to delete
    HashSet<String> removeHdfsFileBatch(List<FileInfo> fileInfoList);
}

FileServiceImpl (service implementation)

package com.jack.graduation.service.impl;

import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.jack.graduation.bean.FileInfo;
import com.jack.graduation.common.Constants;
import com.jack.graduation.config.HdfsConfig;
import com.jack.graduation.exception.ServiceException;
import com.jack.graduation.mapper.FileMapper;
import com.jack.graduation.service.FileService;
import com.jack.graduation.utils.HdfsUtil;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

/**
 * @BelongsProject: graduation
 * @BelongsPackage: com.jack.graduation.service.impl
 * @Author: jack
 * @CreateTime: 2023-01-05  18:48
 * @Description: HDFS file service implementation
 * @Version: jdk1.8
 */
@Service
public class FileServiceImpl extends ServiceImpl<FileMapper, FileInfo> implements FileService {


    @Autowired
    private HdfsUtil hdfsUtil;
    @Autowired
    private HdfsConfig hdfsConfig;

    /**
     * @param file     the file received from the frontend
     * @param fileName target file name in HDFS
     * @return true if the file exists in HDFS after the write
     */
    @Override
    public boolean uploadHdfs(MultipartFile file, String fileName) {
        try {
            hdfsUtil.createFile(hdfsConfig.getHdfsPath() + fileName, file, fileName);
            // verify the write by checking that the file now exists
            return hdfsUtil.existFile(hdfsConfig.getHdfsPath() + fileName);
        } catch (Exception e) {
            throw new ServiceException(Constants.CODE_500, "hdfs io error!");
        }
    }

    @Override
    public boolean removeHdfsFile(FileInfo fileInfo) {
        boolean res;
        String filename = fileInfo.getUuid() + StrUtil.DOT + fileInfo.getFileType();

        try {
            // isEtl == 0: raw file directory; otherwise the cleaned-file directory
            if (fileInfo.getIsEtl() == 0) {
                res = hdfsUtil.deleteFile(hdfsConfig.getHdfsPath() + filename);
            } else {
                res = hdfsUtil.deleteFile(hdfsConfig.getHdfsCleanPath() + filename);
            }
        } catch (Exception e) {
            throw new ServiceException(Constants.CODE_500, "Failed to delete file from HDFS!");
        }
        return res;
    }

    @Override
    public byte[] downloadHdfsFile(String newFileName, Integer isEtl) {

        FileSystem fs = null;
        FSDataInputStream fis = null;
        byte[] resBytes;
        try {
            fs = hdfsUtil.getFileSystem();
            // isEtl == 0: raw file directory; otherwise the cleaned-file directory
            String basePath = isEtl == 0 ? hdfsConfig.getHdfsPath() : hdfsConfig.getHdfsCleanPath();
            // open an input stream and read the whole file into memory
            fis = fs.open(new Path(basePath + newFileName));
            resBytes = IOUtils.readFullyToByteArray(fis);
        } catch (Exception e) {
            throw new ServiceException(Constants.CODE_500, "Failed to download file from HDFS!");
        } finally {
            IOUtils.closeStream(fis);
            if (fs != null) {
                try {
                    fs.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return resBytes;
    }

    @Override
    public HashSet<String> removeHdfsFileBatch(List<FileInfo> fileInfoList) {
        // collect the display names of files that failed to delete
        HashSet<String> resSet = new HashSet<>();
        for (FileInfo fileInfo : fileInfoList) {
            String filename = fileInfo.getUuid() + StrUtil.DOT + fileInfo.getFileType();
            try {
                // isEtl == 0: raw file directory; otherwise the cleaned-file directory
                String basePath = fileInfo.getIsEtl() == 0 ? hdfsConfig.getHdfsPath() : hdfsConfig.getHdfsCleanPath();
                boolean res = hdfsUtil.deleteFile(basePath + filename);
                if (!res) {
                    resSet.add(fileInfo.getFileName() + " failed to delete!");
                }
            } catch (Exception e) {
                throw new ServiceException(Constants.CODE_500, resSet.toString());
            }
        }
        return resSet;
    }

}
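Both the controller and the service throw com.jack.graduation.exception.ServiceException, which the post does not show. Below is a minimal sketch consistent with how it is used above; treating the code as a String is an assumption, matching the res.code === "200" comparison in the frontend:

package com.jack.graduation.exception;

// sketch of the assumed business exception, inferred from its call sites
public class ServiceException extends RuntimeException {
    private final String code;

    public ServiceException(String code, String msg) {
        super(msg);
        this.code = code;
    }

    public String getCode() {
        return code;
    }
}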

HdfsConfig (reads settings from application.yml)

package com.jack.graduation.config;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

/**
 * @BelongsProject: graduation
 * @BelongsPackage: com.jack.graduation.config
 * @Author: jack
 * @CreateTime: 2023-01-03  01:38
 * @Description: HDFS configuration properties
 * @Version: jdk1.8
 */
@Configuration
@Data
@NoArgsConstructor
@AllArgsConstructor
public class HdfsConfig {
    // HDFS NameNode connection URL
    @Value("${nameNode.url}")
    private String nameNodeUrl;

    // user to act as when talking to HDFS
    @Value("${hdfs.userName}")
    private String hdfsUserName;

    // DataNode path (note the trailing "/" appended to the property value)
    @Value("${hdfs.dataNode}/")
    private String pdfDataNode;

    // HDFS directory for raw uploads
    @Value("${nameNode.hdfsPath}")
    private String hdfsPath;

    // HDFS directory for cleaned files
    @Value("${nameNode.hdfsCleanPath}")
    private String hdfsCleanPath;

}
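The @Value placeholders above imply an application.yml along the following lines. This is a sketch, not the post's actual config: the host, port and directory names are placeholders for your own cluster. Note that hdfsPath and hdfsCleanPath should end with a trailing slash, because file names are concatenated directly onto them in FileServiceImpl.

# assumed application.yml (adjust to your cluster)
nameNode:
  url: hdfs://localhost:9000
  hdfsPath: /graduation/upload/        # raw uploads; keep the trailing slash
  hdfsCleanPath: /graduation/clean/    # cleaned files; keep the trailing slash

hdfs:
  userName: hadoop
  dataNode: /graduation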

HdfsUtil (HDFS utility class)

package com.jack.graduation.utils;

import com.alibaba.druid.util.StringUtils;
import com.jack.graduation.config.HdfsConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @BelongsProject: graduation
 * @BelongsPackage: com.jack.graduation.utils
 * @Author: jack
 * @CreateTime: 2023-01-03  01:40
 * @Description: HDFS utility class
 * @Version: jdk1.8
 */
@Component
public class HdfsUtil {
    public static final Logger logger = LoggerFactory.getLogger(HdfsUtil.class);


    @Autowired
    private HdfsConfig hdfsConfig;

    /**
     * Build the HDFS client configuration. Setting precedence:
     * explicit Configuration settings > hdfs-site.xml on the classpath > the cluster's hdfs-default.xml
     *
     * @return the client Configuration
     */
    private Configuration getConfiguration() {
        Configuration configuration = new Configuration();
        configuration.set("dfs.support.append", "true");
        configuration.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
        configuration.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        return configuration;
    }


    /**
     * Obtain an HDFS FileSystem client.
     *
     * @return a FileSystem bound to the configured NameNode
     * @throws Exception if the client cannot be created
     */
    public FileSystem getFileSystem() throws Exception {
        // An HDFS client always acts as some user. By default the client reads the
        // HADOOP_USER_NAME environment variable / JVM property; here the user is
        // passed explicitly when constructing the FileSystem.
        return FileSystem.get(
                new URI(hdfsConfig.getNameNodeUrl()),
                getConfiguration(), hdfsConfig.getHdfsUserName());
    }

    /**
     * Create a directory in HDFS.
     *
     * @param path directory path
     * @return true if the directory exists afterwards
     * @throws Exception on HDFS errors
     */
    public boolean mkdir(String path) throws Exception {
        FileSystem fs = null;
        boolean isOk = false;
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        try {
            if (existFile(path)) {
                logger.info("hdfs path already exists: {}", path);
                return true;
            }
            // create the target directory
            fs = getFileSystem();
            Path srcPath = new Path(path);
            isOk = fs.mkdirs(srcPath);
            logger.info("hdfs mkdir success: {}", path);
        } catch (Exception e) {
            logger.error("hdfs mkdir error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }
        return isOk;
    }

    /**
     * Check whether a path exists in HDFS.
     *
     * @param path path to check
     * @return true if the path exists
     * @throws Exception on HDFS errors
     */
    public boolean existFile(String path) throws Exception {
        boolean isExists = false;
        FileSystem fs = null;
        if (StringUtils.isEmpty(path)) {
            return false;
        }
        try {
            fs = getFileSystem();
            Path srcPath = new Path(path);
            isExists = fs.exists(srcPath);
        } catch (Exception e) {
            logger.error("hdfs existFile error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }
        return isExists;
    }

    /**
     * List the entries of an HDFS directory.
     *
     * @param path directory path
     * @return a list of {filePath, fileStatus} maps, or null if the path is empty or missing
     * @throws Exception on HDFS errors
     */
    public List<Map<String, Object>> readPathInfo(String path) throws Exception {
        if (StringUtils.isEmpty(path) || !existFile(path)) {
            return null;
        }
        FileSystem fs = null;
        try {
            fs = getFileSystem();
            // target directory
            Path newPath = new Path(path);
            FileStatus[] statusList = fs.listStatus(newPath);
            List<Map<String, Object>> list = new ArrayList<>();
            if (null != statusList && statusList.length > 0) {
                for (FileStatus fileStatus : statusList) {
                    Map<String, Object> map = new HashMap<>();
                    map.put("filePath", fileStatus.getPath());
                    map.put("fileStatus", fileStatus.toString());
                    list.add(map);
                }
                return list;
            }
        } catch (Exception e) {
            logger.error("hdfs readPathInfo error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }
        return null;
    }

    /**
     * Create a file in HDFS from an uploaded MultipartFile, keeping its original name.
     *
     * @param path target directory in HDFS
     * @param file uploaded file
     * @throws Exception on HDFS errors
     */
    public void createFile(String path, MultipartFile file) throws Exception {
        if (StringUtils.isEmpty(path) || null == file.getBytes()) {
            return;
        }
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;
        try {
            fs = getFileSystem();
            String fileName = file.getOriginalFilename();
            // append the original file name to the target directory
            Path newPath = new Path(path + "/" + fileName);
            // open an output stream and write the file bytes
            outputStream = fs.create(newPath);
            outputStream.write(file.getBytes());
            outputStream.flush();
        } finally {
            if (outputStream != null) {
                outputStream.close();
            }

            if (fs != null) {
                fs.close();
            }
        }
    }

    /**
     * Create a file in HDFS from an uploaded MultipartFile.
     *
     * @param path        full target path in HDFS, including the new file name
     * @param file        uploaded file
     * @param newFilename new file name (kept for the caller's signature; the name is already part of path)
     * @throws Exception on HDFS errors
     */
    public void createFile(String path, MultipartFile file, String newFilename) throws Exception {
        if (StringUtils.isEmpty(path) || null == file.getBytes()) {
            return;
        }
        FileSystem fs = null;
        FSDataOutputStream outputStream = null;
        try {
            fs = getFileSystem();
            Path newPath = new Path(path);
            // open an output stream and write the file bytes
            outputStream = fs.create(newPath);
            outputStream.write(file.getBytes());
            outputStream.flush();
        } finally {
            if (outputStream != null) {
                outputStream.close();
            }

            if (fs != null) {
                fs.close();
            }
        }
    }


    /**
     * Stream an HDFS file directly into an output stream.
     *
     * @param os   output stream (e.g. the servlet response stream)
     * @param path remote HDFS file path
     * @throws Exception on HDFS errors
     */
    public void writeOutputStreamFile(OutputStream os, String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return;
        }
        FileSystem fs = null;
        FSDataInputStream inputStream = null;
        try {
            // target path
            Path srcPath = new Path(path);
            fs = getFileSystem();
            inputStream = fs.open(srcPath);
            // copy in buffered chunks rather than loading the whole file into memory
            fileDownload(os, new BufferedInputStream(inputStream));
        } finally {
            if (inputStream != null) {
                inputStream.close();
            }
            if (fs != null) {
                fs.close();
            }
        }
    }

    /**
     * Read an HDFS text file into a String.
     *
     * @param path file path
     * @return the file content, or null if the path is empty or missing
     * @throws Exception on HDFS errors
     */
    public String readFile(String path) throws Exception {
        if (StringUtils.isEmpty(path) || !existFile(path)) {
            return null;
        }
        FileSystem fs = null;
        FSDataInputStream inputStream = null;
        try {
            // target path
            Path srcPath = new Path(path);
            fs = getFileSystem();
            inputStream = fs.open(srcPath);
            // wrap in a Reader so the bytes are decoded as text (avoids garbled non-ASCII content)
            BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
            String lineTxt;
            StringBuilder sb = new StringBuilder();
            while ((lineTxt = reader.readLine()) != null) {
                sb.append(lineTxt);
            }
            return sb.toString();
        } finally {
            if (inputStream != null) {
                inputStream.close();
            }
            if (fs != null) {
                fs.close();
            }
        }
    }


    /**
     * 读取HDFS文件列表
     *
     * @param path
     * @return
     * @throws Exception
     */
    public List<Map<String, String>> listFile(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }

        FileSystem fs = null;
        try {
            fs = getFileSystem();
            // 目标路径
            Path srcPath = new Path(path);
            // 递归找到所有文件
            RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(srcPath, true);
            List<Map<String, String>> returnList = new ArrayList<>();
            while (filesList.hasNext()) {
                LocatedFileStatus next = filesList.next();
                String fileName = next.getPath().getName();
                Path filePath = next.getPath();
                Map<String, String> map = new HashMap<>();
                map.put("fileName", fileName);
                map.put("filePath", filePath.toString());
                returnList.add(map);
            }
            return returnList;
        } catch (Exception e) {
            logger.error("hdfs listFile {}", e);
        } finally {
            if (fs != null) {
                fs.close();

            }
        }
        return null;
    }


    /**
     * Rename an HDFS file.
     *
     * @param oldName current path
     * @param newName new path
     * @return true if the rename succeeded
     * @throws Exception on HDFS errors
     */
    public boolean renameFile(String oldName, String newName) throws Exception {
        if (StringUtils.isEmpty(oldName) || StringUtils.isEmpty(newName)) {
            return false;
        }
        FileSystem fs = null;
        boolean isOk = false;
        try {
            fs = getFileSystem();
            // current path
            Path oldPath = new Path(oldName);
            // new path
            Path newPath = new Path(newName);
            isOk = fs.rename(oldPath, newPath);
        } catch (Exception e) {
            logger.error("hdfs renameFile error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }
        return isOk;
    }


    /**
     * Delete an HDFS file.
     *
     * @param path file path
     * @return true if the deletion was registered
     * @throws Exception on HDFS errors
     */
    public boolean deleteFile(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return false;
        }

        FileSystem fs = null;
        boolean isOk = false;
        try {
            if (!existFile(path)) {
                return false;
            }
            fs = getFileSystem();
            Path srcPath = new Path(path);
            // deleteOnExit removes the path when this FileSystem is closed (in the finally block);
            // fs.delete(srcPath, true) would delete it immediately instead
            isOk = fs.deleteOnExit(srcPath);
        } catch (Exception e) {
            logger.error("hdfs deleteFile error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }
        return isOk;
    }

    /**
     * Upload a local file to HDFS.
     *
     * @param path       local file path on this server
     * @param uploadPath target path in HDFS
     * @throws Exception on HDFS errors
     */
    public void uploadFile(String path, String uploadPath) throws Exception {
        if (StringUtils.isEmpty(path) || StringUtils.isEmpty(uploadPath)) {
            return;
        }
        FileSystem fs = null;
        try {
            fs = getFileSystem();
            // local source path
            Path clientPath = new Path(path);
            // HDFS target path
            Path serverPath = new Path(uploadPath);
            // first argument: whether to delete the local source file (false keeps it)
            fs.copyFromLocalFile(false, clientPath, serverPath);
        } catch (Exception e) {
            logger.error("hdfs uploadFile error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }

    }


    /**
     * Download an HDFS file to the local file system.
     *
     * @param path         source path in HDFS
     * @param downloadPath local target path
     * @throws Exception on HDFS errors
     */
    public void downloadFile(String path, String downloadPath) throws Exception {
        if (StringUtils.isEmpty(path) || StringUtils.isEmpty(downloadPath)) {
            return;
        }
        FileSystem fs = null;
        try {
            fs = getFileSystem();
            // HDFS source path
            Path clientPath = new Path(path);
            // local target path
            Path serverPath = new Path(downloadPath);
            // first argument: whether to delete the HDFS source file (false keeps it)
            fs.copyToLocalFile(false, clientPath, serverPath);
        } catch (Exception e) {
            logger.error("hdfs downloadFile error", e);
        } finally {
            if (fs != null) {
                fs.close();
            }
        }
    }

    /**
     * Copy a file within HDFS
     * @param sourcePath
     * @param targetPath
     * @throws Exception
     */
    /*public void copyFile(String sourcePath, String targetPath) throws Exception {
        if (StringUtils.isEmpty(sourcePath) || StringUtils.isEmpty(targetPath)) {
            return;
        }
        FileSystem fs = getFileSystem();
        // source path
        Path oldPath = new Path(sourcePath);
        // target path
        Path newPath = new Path(targetPath);

        FSDataInputStream inputStream = null;
        FSDataOutputStream outputStream = null;
        try {
            inputStream = fs.open(oldPath);
            outputStream = fs.create(newPath);

            IOUtils.copyBytes(inputStream, outputStream, bufferSize, false); // bufferSize: buffer-size constant, not defined in this class
        } finally {
            inputStream.close();
            outputStream.close();
            fs.close();
        }
    }

    *//**
     * Open an HDFS file and return its content as a byte array
     * @param path
     * @return
     * @throws Exception
     *//*
    public byte[] openFileToBytes(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        try {
            FSDataInputStream inputStream = fs.open(srcPath);
            return IOUtils.readFullyToByteArray(inputStream);
        } finally {
            fs.close();
        }
    }

    *//**
     * Open an HDFS file and deserialize it into a Java object
     * @param path
     * @return
     * @throws Exception
     *//*
    public <T extends Object> T openFileToObject(String path, Class<T> clazz) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        String jsonStr = readFile(path);
        return JsonUtil.fromObject(jsonStr, clazz);
    }

    *//**
     * Get the block locations of a file in the HDFS cluster
     * @param path
     * @return
     * @throws Exception
     *//*
    public BlockLocation[] getFileBlockLocations(String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return null;
        }
        if (!existFile(path)) {
            return null;
        }
        FileSystem fs = getFileSystem();
        // target path
        Path srcPath = new Path(path);
        FileStatus fileStatus = fs.getFileStatus(srcPath);
        return fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
    }
*/

    /**
     * Copy an input stream to the response output stream in 1 KB chunks.
     *
     * @param os  response output stream
     * @param bis buffered input stream
     */
    private void fileDownload(OutputStream os, BufferedInputStream bis) throws Exception {
        if (bis == null) {
            return;
        }
        try {
            byte[] buff = new byte[1024];
            int i = bis.read(buff);
            while (i != -1) {
                os.write(buff, 0, i);
                os.flush();
                i = bis.read(buff);
            }
        } finally {
            try {
                bis.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

}
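One design note: downloadHdfsFile in FileServiceImpl reads the whole file into a byte[] before the controller writes it out, which is simple but memory-hungry for large CSVs. writeOutputStreamFile above already streams in 1 KB chunks. Below is a sketch of a controller method built on it instead; the mapping and method name are illustrative, and hdfsUtil/hdfsConfig are assumed to be injected as in FileServiceImpl:

    // sketch: stream straight from HDFS to the HTTP response
    @GetMapping("/stream/{newFileName}/{isEtl}")
    public void streamFile(@PathVariable String newFileName, @PathVariable Integer isEtl,
                           HttpServletResponse response) throws Exception {
        response.setContentType("application/octet-stream");
        response.addHeader("Content-Disposition",
                "attachment;filename=" + URLEncoder.encode(newFileName, "UTF-8"));
        // same raw-vs-cleaned path convention as the rest of the post
        String basePath = isEtl == 0 ? hdfsConfig.getHdfsPath() : hdfsConfig.getHdfsCleanPath();
        // copies chunk by chunk; the full file is never held in memory
        hdfsUtil.writeOutputStreamFile(response.getOutputStream(), basePath + newFileName);
    }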

Frontend Vue code:

<template>
    <div>
        <div class="searchForm">
            <el-input style="width: 200px" placeholder="请输入ID" v-model="id"
                      prefix-icon="el-icon-search"></el-input>
            <el-input class="ml-5" style="width: 200px" placeholder="请输入文件名" v-model="fileName"
                      prefix-icon="el-icon-search"></el-input>
            <el-input class="ml-5" style="width: 200px" placeholder="请输入uuid" v-model="uuid"
                      prefix-icon="el-icon-search"></el-input>
            <el-button class="ml-5" type="primary" @click="rigthId();getData()">搜索</el-button>
            <el-button class="ml-5" type="warning" @click="reset">重置</el-button>
        </div>

        <el-table :data="tableData" border stripe :header-cell-class-name="headerBg"
                  @selection-change="handleSelectionChange"
                  :header-cell-style="{'text-align':'center'}" :cell-style="{'text-align':'center'}">
            <el-table-column type="selection" width="40"></el-table-column>
            <el-table-column prop="id" label="ID" width="60"></el-table-column>
            <el-table-column prop="fileName" label="File name" width="70"></el-table-column>
            <el-table-column prop="fileType" label="Type" width="70"></el-table-column>
            <el-table-column :formatter="formatIsEtl" prop="isEtl" label="Cleaned" width="70"></el-table-column>
            <el-table-column prop="uploadTime" label="Uploaded" width="90"></el-table-column>
            <!--            <el-table-column prop="updateTime" label="Modified" width="90"></el-table-column>-->
            <el-table-column prop="etlTime" label="Cleaned at" width="90"></el-table-column>
            <el-table-column prop="fileSize" label="Size (KB)" width="70"></el-table-column>
            <el-table-column prop="uuid" label="uuid" width="245"></el-table-column>
            <el-table-column prop="url" label="Download URL" width="440"></el-table-column>
            <el-table-column label="Actions" width="220" align="center">
                <template slot-scope="scope">
                    <el-button style="width: 60px;margin-left: 1px;text-align: center" type="success"
                               @click="cleanFile(scope.row)">Clean
                        <i
                                class="el-icon-coin"></i>
                    </el-button>
                    <el-button type="primary" @click="downloadFile(scope.row)"
                               style="width: 60px;margin-left: 1px;text-align: center">Download <i
                            class="el-icon-caret-bottom"></i></el-button>
                    <el-popconfirm
                            class="ml-5"
                            confirm-button-text='OK'
                            cancel-button-text='Cancel'
                            icon="el-icon-info"
                            icon-color="red"
                            title="Delete this file?"
                            @confirm="delFile(scope.row.id)">
                        <el-button type="danger" slot="reference"
                                   style="width: 60px;margin-right: 1px;text-align: center">Delete <i
                                class="el-icon-remove-outline"></i>
                        </el-button>
                    </el-popconfirm>
                </template>
            </el-table-column>
        </el-table>
        <div style=" margin: 10px 0">
            <!-- the action URL must point at the backend upload endpoint shown above -->
            <el-upload action="http://localhost:9090/file/uploadToHdfs" :show-file-list="false"
                       :on-success="uploadToHdfsSuccess" style="display: inline-block;">
                <el-button type="primary" class="ml-5" style="width: 90px;">Upload<i
                        class="el-icon-caret-top"></i>
                </el-button>
            </el-upload>
            <el-popconfirm
                    class="ml-5"
                    confirm-button-text='OK'
                    cancel-button-text='Cancel'
                    icon="el-icon-info"
                    icon-color="red"
                    title="Delete all selected files?"
                    @confirm="delFileBatch"
            >
                <el-button type="danger" slot="reference" class="ml-5" style="width: 90px;">Batch delete <i
                        class="el-icon-remove-outline"></i>
                </el-button>
            </el-popconfirm>

        </div>

        <div class="pagination">
            <el-pagination
                    @size-change="handleSizeChange"
                    @current-change="handleCurrentChange"
                    :current-page="pageNum"
                    :page-sizes="[9, 18, 27, 36]"
                    :page-size="pageSize"
                    layout="total, sizes, prev, pager, next, jumper"
                    :total="total">     <!--分页插件-->
            </el-pagination>
        </div>
    </div>
</template>

<script>
    export default {
        name: "file",
        data() {
            return {
                tableData: [],
                total: 0,
                pageNum: 1,
                fileName: '',
                pageSize: 9,
                dialogFormVisible: false,
                addfileForm: {},
                uuid: '',
                id: '',
                multipleSelection: [],
                headerBg: 'headerBg'
            }
        },
        created() {
            this.getData()
        },
        methods: {
            // validate that the id filter is numeric before searching
            checkId() {
                if (isNaN(this.id)) {
                    this.$message({
                        type: "warning",
                        message: "Please enter a numeric id!"
                    })
                    this.reset()
                }
            },
            reset() {
                this.id = ''
                this.fileName = ''
                this.uuid = ''
                this.getData()
            },
            getData() {
                this.request.get(
                    "/file/page", {
                        params: {
                            pageNum: this.pageNum,
                            pageSize: this.pageSize,
                            fileName: this.fileName,
                            uuid: this.uuid,
                            id: this.id
                        }
                    }
                ).then(res => {
                    console.log(res.data);
                    this.tableData = res.data.records
                    this.total = res.data.total
                })
            },
            // save a file record
            savefile() {
                this.request.post("/file/savefile", this.addfileForm).then(res => {
                    if (res.data) {
                        this.$message.success("Saved")
                        this.dialogFormVisible = false
                        this.getData()
                    } else {
                        this.$message.error("Save failed")
                        this.dialogFormVisible = false
                    }
                })
            },
            // delete a single file
            delFile(id) {
                this.request.delete("/file/deleteFile/" + id).then(res => {
                    if (res.code === "200") {
                        this.$message.success(res.data)
                        this.getData()
                    } else {
                        this.$message.error(res.msg)
                        this.getData()
                    }
                })
            },
            // track the currently selected rows
            handleSelectionChange(val) {
                console.log(val)
                this.multipleSelection = val
            },
            // batch delete
            delFileBatch() {
                // map the selected row objects to their ids
                let ids = this.multipleSelection.map(row => row.id);
                // post the id list to the backend
                this.request.post("/file/delFileBatch", ids).then(res => {
                    if (res.code === "200") {
                        this.$message.success(res.data)
                        this.getData()
                    } else {
                        this.$message.error(res.msg)
                        this.getData()
                    }
                })
            },
            // pagination handlers
            handleSizeChange(pageSize) {
                this.pageSize = pageSize
                this.getData()
            },
            handleCurrentChange(pageNum) {
                this.pageNum = pageNum
                this.getData()
            },
            // map the backend 0/1 flag to display text
            formatIsEtl(row) {
                return row.isEtl === 1 ? "Cleaned" : "Not cleaned";
            },
            // callback after the upload request completes
            uploadToHdfsSuccess(res) {
                console.log(res);
                if (res.code === '200') {
                    this.$message.success("File uploaded successfully")
                } else {
                    this.$message.error(res.msg)
                }
                this.getData()
            },

            downloadFile(row) {
                // row.url already points at /file/{newFileName}; append isEtl so the
                // backend serves the raw or the cleaned copy
                window.open(row.url + "/" + row.isEtl)
            }
        }
    }
</script>
<style>
    .headerBg {
        background: #eee !important;
    }

    .searchForm {
        margin: 10px 0;
    }

    .pagination {
        padding: 10px 0;
        width: max-content;
        margin: 0 auto;
        position: fixed;
        bottom: 10px;
        left: 40%;
    }

</style>
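The component calls this.request.get/post/delete and then reads res.code, res.msg and res.data directly, which implies an axios instance mounted on the Vue prototype whose response interceptor unwraps the HTTP body into the backend Result object. That wrapper is not shown in the post; a minimal sketch under those assumptions (baseURL taken from the URLs used above):

// request.js -- assumed axios wrapper, a sketch
import axios from 'axios'
import Vue from 'vue'

const request = axios.create({
    baseURL: 'http://localhost:9090', // backend address used elsewhere in this post
    timeout: 30000
})

// unwrap the HTTP body so callers see the backend Result {code, msg, data} directly
request.interceptors.response.use(response => response.data)

// expose it as this.request inside components
Vue.prototype.request = request

export default request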