HdfsUtils

```java
package com.zhiyou.db17;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUtils {

    // Loads core-site.xml / hdfs-site.xml from the classpath.
    public static final Configuration CONF = new Configuration();
    public static FileSystem hdfs;

    static {
        try {
            hdfs = FileSystem.get(CONF);
        } catch (Exception e) {
            System.out.println("Unable to connect to HDFS; please check the configuration.");
            e.printStackTrace();
        }
    }

    // Create a new file on HDFS and write some data into it.
    public static void createFile(String fileName, String content) throws IOException {
        Path path = new Path(fileName);
        if (hdfs.exists(path)) {
            System.out.println("File " + fileName + " already exists on HDFS");
        } else {
            FSDataOutputStream outputStream = hdfs.create(path);
            // writeUTF prefixes the string with a 2-byte length; pair it with readUTF.
            outputStream.writeUTF(content);
            outputStream.flush();
            outputStream.close();
        }
    }

    // Read a file from HDFS.
    public static void readFile(String fileName) throws IOException {
        Path path = new Path(fileName);
        if (!hdfs.exists(path) || hdfs.isDirectory(path)) {
            System.out.println("The given path " + fileName + " does not exist or is not a file");
        } else {
            FSDataInputStream inputStream = hdfs.open(path);
            String content = inputStream.readUTF();
            System.out.println(content);
            inputStream.close();
        }
    }

    // Delete an existing file on HDFS.
    public static void deleteFile(String fileName) throws IOException {
        Path path = new Path(fileName);
        if (!hdfs.exists(path)) {
            System.out.println("The given path " + fileName + " does not exist");
        } else {
            // true: delete recursively if the path is a directory.
            hdfs.delete(path, true);
        }
    }

    // Upload a local (Windows) file to HDFS.
    public static void uploadFile(String fileName, String hdfsPath) throws IOException {
        Path src = new Path(fileName);
        Path dst = new Path(hdfsPath);
        hdfs.copyFromLocalFile(src, dst);
    }

    // Download a file from HDFS to the local (Windows) machine.
    public static void downloadFile(String fileName, String localPath) throws Exception {
        Path src = new Path(fileName);
        Path dst = new Path(localPath);

        // This variant requires the HADOOP_HOME/winutils environment variables:
        // hdfs.copyToLocalFile(src, dst);

        // This variant does not: false = do not delete src after copying,
        // true = write through the raw local file system (skips the .crc checksum file).
        hdfs.copyToLocalFile(false, src, dst, true);
    }

    // Upload a local file to HDFS with an explicit read/write loop.
    public static void uploadFile2(String fileName, String hdfsPath) throws IOException {
        Path path = new Path(hdfsPath);
        if (hdfs.exists(path)) {
            System.out.println("The file already exists on HDFS; writing would overwrite it");
        } else {
            FileInputStream inputStream = new FileInputStream(fileName);
            FSDataOutputStream dataOutputStream = hdfs.create(path);

            byte[] bytes = new byte[5];
            int length;
            while ((length = inputStream.read(bytes)) != -1) {
                // Only write the bytes actually read in this pass.
                dataOutputStream.write(bytes, 0, length);
            }
            dataOutputStream.flush();

            dataOutputStream.close();
            inputStream.close();
        }
    }

    // Download a file from HDFS to the local machine with an explicit read/write loop.
    public static void downloadFile2(String hdfsFile, String localPath) throws IOException {
        Path path = new Path(hdfsFile);
        if (!hdfs.exists(path) || hdfs.isDirectory(path)) {
            System.out.println("The given path " + hdfsFile + " does not exist or is not a file");
        } else {
            FSDataInputStream dataInputStream = hdfs.open(path);
            FileOutputStream outputStream = new FileOutputStream(localPath);

            byte[] bytes = new byte[5];
            int length;
            // Capture the byte count from read(): writing bytes.length on the last,
            // partially filled buffer would append stale trailing bytes to the file.
            while ((length = dataInputStream.read(bytes, 0, bytes.length)) != -1) {
                outputStream.write(bytes, 0, length);
            }
            outputStream.flush();

            outputStream.close();
            dataInputStream.close();
        }
    }

    // Show the status of a path on HDFS.
    public static void getFileStatus(String fileName) throws Exception {
        Path path = new Path(fileName);
        FileStatus[] status = hdfs.listStatus(path);
        for (FileStatus fileStatus : status) {
            System.out.println(fileStatus);
        }
    }

    // Given a directory path, recursively list the status of every file under it
    // (directories themselves are not printed) - variant 1.
    public static void getALLFileStatus(String fileName) throws Exception {
        Path path = new Path(fileName);
        FileStatus[] status = hdfs.listStatus(path);
        for (FileStatus fileStatus : status) {
            if (fileStatus.isDirectory()) {
                getALLFileStatus(fileStatus.getPath().toString());
            } else {
                System.out.println(fileStatus);
            }
        }
    }

    // Given a directory path, recursively list the status of every file under it
    // (directories themselves are not printed) - variant 2.
    public static void getALLFileStatus2(String fileName) throws Exception {
        Path path = new Path(fileName);
        if (hdfs.isDirectory(path)) {
            FileStatus[] status = hdfs.listStatus(path);
            for (FileStatus fileStatus : status) {
                getALLFileStatus2(fileStatus.getPath().toString());
            }
        } else {
            // getFileStatus is sufficient here; getFileLinkStatus is only needed
            // when symlink status must not be resolved.
            FileStatus s1 = hdfs.getFileStatus(path);
            System.out.println(s1);
        }
    }

    // Upload a local file to HDFS using append.
    // Note: append requires the cluster to allow appends (dfs.support.append).
    public static void uploadToHdfs(String localPath, String hdfsPath) throws IOException {
        Path path = new Path(hdfsPath);
        if (!hdfs.exists(path)) {
            // Create an empty file first so that append has something to open.
            FSDataOutputStream outputStream = hdfs.create(path);
            outputStream.close();
        }

        FileReader fr = new FileReader(localPath);
        FSDataOutputStream fsout = hdfs.append(path);

        char[] chars = new char[5];
        int n;
        while ((n = fr.read(chars)) != -1) {
            // Only convert the chars actually read; each chunk becomes a separate
            // length-prefixed UTF record in the target file.
            fsout.writeUTF(String.valueOf(chars, 0, n));
        }

        fr.close();
        fsout.close();
    }

    // Download a file from HDFS to the local machine.
    public static void downToLocal(String hdfsPath, String localPath) throws IOException {
        Path path = new Path(hdfsPath);
        FSDataInputStream fsinput = hdfs.open(path);
        // readUTF reads a single length-prefixed record, so this pairs with
        // createFile above; files written by uploadToHdfs contain one record
        // per chunk and would need a read loop instead.
        String content = fsinput.readUTF();

        FileWriter fw = new FileWriter(localPath);
        fw.write(content);

        fw.close();
        fsinput.close();
    }


    public static void main(String[] args) throws Exception {
        String content = "hello hdfs";
        String fileName = "/bd17/11";
        // createFile(fileName, content);
        // readFile(fileName);
        // deleteFile("/bd17");
        // uploadFile("C:\\Users\\Administrator\\Desktop\\123.txt", "/bd17/66.txt");
        // downloadFile(fileName, "C:\\Users\\Administrator\\Desktop\\5.txt");
        // getFileStatus("/");
        // getALLFileStatus("/");
        // getALLFileStatus2("/");
        // uploadToHdfs("C:\\Users\\Administrator\\Desktop\\00", fileName);
        downToLocal(fileName, "C:\\Users\\Administrator\\Desktop\\22");
    }

}
```
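The static initializer above picks up whatever core-site.xml / hdfs-site.xml happens to be on the classpath. If you would rather point the client at a cluster explicitly (for example when running from a Windows IDE without a local Hadoop install), a minimal sketch looks like this; the NameNode URI and user name are placeholders for your own cluster:

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsConnectExample {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical NameNode address; replace with your cluster's value.
        URI uri = URI.create("hdfs://localhost:9000");
        // The three-argument overload also sets the user the client acts as,
        // which avoids permission errors when your OS user name differs.
        FileSystem fs = FileSystem.get(uri, conf, "hadoop");
        System.out.println(fs.exists(new Path("/")));
        fs.close();
    }
}
```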
Below is sample code for a Spring Boot application that integrates HDFS and MySQL to implement file upload and download:

1. Configuration file, application.properties:

```
#HDFS configuration
hadoop.hdfs.uri=hdfs://localhost:9000
hadoop.hdfs.user.name=hadoop

#MySQL configuration
spring.datasource.url=jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=UTC
spring.datasource.username=root
spring.datasource.password=root
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver

#Local staging path for uploads
upload.path=/usr/local/uploads/
```

2. Entity class, FileEntity.java:

```java
@Entity
@Table(name = "file")
@Data
public class FileEntity {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @Column(nullable = false)
    private String name;

    @Column(nullable = false)
    private String path;

    @Column(nullable = false)
    private String type;

    @Column(nullable = false)
    private Long size;

    @Column(name = "create_time", nullable = false)
    private Date createTime;
}
```

3. HDFS utility class, HdfsUtils.java:

```java
@Component
public class HdfsUtils {

    @Value("${hadoop.hdfs.uri}")
    private String hdfsUri;

    @Value("${hadoop.hdfs.user.name}")
    private String hdfsUserName;

    private FileSystem fileSystem;

    @PostConstruct
    public void init() throws IOException {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", hdfsUri);
        fileSystem = FileSystem.get(configuration);
    }

    public void uploadFile(String srcPath, String destPath) throws IOException {
        Path src = new Path(srcPath);
        Path dst = new Path(destPath);
        fileSystem.copyFromLocalFile(src, dst);
    }

    public void downloadFile(String srcPath, String destPath) throws IOException {
        Path src = new Path(srcPath);
        Path dst = new Path(destPath);
        fileSystem.copyToLocalFile(src, dst);
    }

    public void deleteFile(String path) throws IOException {
        Path src = new Path(path);
        fileSystem.delete(src, true);
    }

    // Accessors used by FileServiceImpl below.
    public String getHdfsUri() { return hdfsUri; }

    public String getHdfsUserName() { return hdfsUserName; }

    public Configuration getConfiguration() { return fileSystem.getConf(); }
}
```

4. File service interface, FileService.java:

```java
public interface FileService {

    FileEntity save(MultipartFile file) throws IOException;

    Resource loadFileAsResource(Long id) throws FileNotFoundException;

    void delete(Long id) throws IOException;

    Page<FileEntity> findByPage(int pageNum, int pageSize);
}
```
5. Service implementation, FileServiceImpl.java:

```java
@Service
public class FileServiceImpl implements FileService {

    private final String uploadPath = System.getProperty("user.dir") + "/uploads/";

    @Autowired
    private FileRepository fileRepository;

    @Autowired
    private HdfsUtils hdfsUtils;

    @Override
    public FileEntity save(MultipartFile file) throws IOException {
        String fileName = file.getOriginalFilename();
        String fileType = fileName.substring(fileName.lastIndexOf(".") + 1);
        String filePath = uploadPath + fileName;

        // Stage the upload on the local disk, then copy it into HDFS.
        File destFile = new File(filePath);
        destFile.getParentFile().mkdirs();
        file.transferTo(destFile);

        String hdfsFilePath = "/upload/" + fileName;
        hdfsUtils.uploadFile(filePath, hdfsFilePath);

        FileEntity fileEntity = new FileEntity();
        fileEntity.setName(fileName);
        fileEntity.setPath(hdfsFilePath);
        fileEntity.setType(fileType);
        fileEntity.setSize(file.getSize());
        fileEntity.setCreateTime(new Date());
        return fileRepository.save(fileEntity);
    }

    @Override
    public Resource loadFileAsResource(Long id) throws FileNotFoundException {
        FileEntity fileEntity = fileRepository.findById(id)
                .orElseThrow(() -> new FileNotFoundException("File not found"));
        String filePath = fileEntity.getPath();
        String fileName = fileEntity.getName();
        Path path = new Path(filePath);
        try {
            FileSystem fs = FileSystem.get(URI.create(hdfsUtils.getHdfsUri()),
                    hdfsUtils.getConfiguration(), hdfsUtils.getHdfsUserName());
            FSDataInputStream inputStream = fs.open(path);
            // Expose the stored name so the controller's Content-Disposition header works.
            return new InputStreamResource(inputStream) {
                @Override
                public String getFilename() {
                    return fileName;
                }
            };
        } catch (IOException | InterruptedException e) {
            e.printStackTrace();
            throw new FileNotFoundException(e.getMessage());
        }
    }

    @Override
    public void delete(Long id) throws IOException {
        FileEntity fileEntity = fileRepository.findById(id)
                .orElseThrow(() -> new FileNotFoundException("File not found"));
        String filePath = fileEntity.getPath();
        hdfsUtils.deleteFile(filePath);
        fileRepository.deleteById(id);
    }

    @Override
    public Page<FileEntity> findByPage(int pageNum, int pageSize) {
        Pageable pageable = PageRequest.of(pageNum, pageSize, Sort.Direction.DESC, "createTime");
        return fileRepository.findAll(pageable);
    }
}
```

6. Controller, FileController.java:

```java
@RestController
@RequestMapping("/file")
public class FileController {

    @Autowired
    private FileService fileService;

    @PostMapping
    public ResponseEntity<?> uploadFile(@RequestParam("file") MultipartFile file) throws IOException {
        FileEntity fileEntity = fileService.save(file);
        URI location = ServletUriComponentsBuilder.fromCurrentRequest().path("/{id}")
                .buildAndExpand(fileEntity.getId()).toUri();
        return ResponseEntity.created(location).build();
    }

    @GetMapping("/{id}")
    public ResponseEntity<Resource> downloadFile(@PathVariable Long id) throws FileNotFoundException {
        Resource resource = fileService.loadFileAsResource(id);
        return ResponseEntity.ok()
                .header(HttpHeaders.CONTENT_DISPOSITION,
                        "attachment; filename=\"" + resource.getFilename() + "\"")
                .body(resource);
    }

    @DeleteMapping("/{id}")
    public ResponseEntity<?> deleteFile(@PathVariable Long id) throws IOException {
        fileService.delete(id);
        return ResponseEntity.noContent().build();
    }

    @GetMapping("/list")
    public ResponseEntity<Page<FileEntity>> getList(@RequestParam(defaultValue = "0") int pageNum,
                                                    @RequestParam(defaultValue = "10") int pageSize) {
        Page<FileEntity> page = fileService.findByPage(pageNum, pageSize);
        return ResponseEntity.ok(page);
    }
}
```

7. Repository, FileRepository.java:

```java
public interface FileRepository extends JpaRepository<FileEntity, Long> {
}
```

That completes the upload and download functionality. Start the application and you can upload, download, and delete files with Postman or any other HTTP client.
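As a quick smoke test, the endpoints can also be exercised from plain Java. Below is a minimal sketch using java.net.http.HttpClient (JDK 11+); the host, port, and file id are assumptions for a local run, and the upload call is omitted because HttpClient has no built-in multipart encoder:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Paths;

public class FileApiSmokeTest {

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // Assumed local address and an existing file id; adjust for your setup.
        String base = "http://localhost:8080/file";

        // Download file 1 into the working directory.
        HttpRequest download = HttpRequest.newBuilder(URI.create(base + "/1")).GET().build();
        HttpResponse<java.nio.file.Path> saved =
                client.send(download, HttpResponse.BodyHandlers.ofFile(Paths.get("downloaded.bin")));
        System.out.println("GET " + saved.statusCode() + " -> " + saved.body());

        // List the first page of file records.
        HttpRequest list = HttpRequest.newBuilder(URI.create(base + "/list?pageNum=0&pageSize=10"))
                .GET().build();
        System.out.println(client.send(list, HttpResponse.BodyHandlers.ofString()).body());

        // Delete file 1.
        HttpRequest delete = HttpRequest.newBuilder(URI.create(base + "/1")).DELETE().build();
        System.out.println("DELETE "
                + client.send(delete, HttpResponse.BodyHandlers.discarding()).statusCode());
    }
}
```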
