Building a Simple Data Cloud Disk with HDFS and JavaWeb

A first pass at a data cloud disk built on HDFS and JavaWeb.

1. index.jsp

<%@ page language="java" contentType="text/html; charset=UTF-8"
    pageEncoding="UTF-8"%>
<%@ page import="org.apache.hadoop.fs.FileStatus"%>
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
    "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>HDFS Cloud Disk</title>
</head>
<body style="text-align: center; margin-bottom: 100px;">
    <div class="navbar">
        <div class="navbar-inner">
            <a class="brand" href="#" style="font-size: 50px;">Cloud Disk</a>
        </div>
    </div>
    <div style="margin: 0 auto; text-align: left; width: 1200px; height: 50px;">
        <form class="form-inline" method="POST" enctype="multipart/form-data"
            action="UploadServlet">
            <div style="line-height: 50px; float: left;">
                <input type="submit" name="submit" value="Upload" />
            </div>
            <div style="line-height: 50px; float: left;">
                <input type="file" name="file1" size="30" />
            </div>
        </form>
    </div>
    <div style="margin: 0 auto; width: 1200px; height: 500px; background: #fff">
        <table class="table table-hover" style="width: 1000px; margin-left: 100px;">
            <tr style="border-bottom: 2px solid #ddd">
                <td>File Name</td>
                <td style="width: 100px;">Type</td>
                <td style="width: 100px;">Size (KB)</td>
                <td style="width: 100px;">Action</td>
                <td style="width: 100px;">Action</td>
            </tr>
            <%
                FileStatus[] list = (FileStatus[]) request.getAttribute("list");
                if (list != null)
                    for (int i = 0; i < list.length; i++) {
            %>
            <tr style="border-bottom: 1px solid #eee">
                <%
                    if (list[i].isDir()) {
                        out.print("<td><a href=\"UploadServlet?filePath="
                                + list[i].getPath() + "\">"
                                + list[i].getPath().getName() + "</a></td>");
                    } else {
                        out.print("<td>" + list[i].getPath().getName() + "</td>");
                    }
                %>
                <td><%=(list[i].isDir() ? "Directory" : "File")%></td>
                <td><%=list[i].getLen() / 1024%></td>
                <td><a
                    href="DeleteFileServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(), "GB2312")%>"
                    class="delete">Delete</a></td>
                <td><a
                    href="DownloadServlet?filePath=<%=java.net.URLEncoder.encode(list[i].getPath().toString(), "GB2312")%>">Download</a></td>
            </tr>
            <%
                }
            %>
        </table>
    </div>
</body>
</html>
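
Note that index.jsp only renders rows when a servlet has forwarded to it with a FileStatus[] stored under the request attribute "list"; opening the page directly shows an empty table. Below is a minimal sketch of an entry-point servlet that fills the table on the first visit. The class name ListFileServlet and its mapping are my own additions, not part of the original project:

package com.hadoop.yunpan.controller;

import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;
import com.hadoop.yunpan.model.HDFSDao;

// Hypothetical entry point: load the HDFS listing, then forward to
// index.jsp so the table is populated on the first visit.
public class ListFileServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        // Same listing call the other servlets issue after each operation
        FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
        request.setAttribute("list", list);
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }
}

It would need its own <servlet> and <servlet-mapping> entries in web.xml, mirroring the three shown in section 6.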

2. HDFSDao.java

HDFSDao.java wraps all access to the HDFS file system.
package com.hadoop.yunpan.model;

import java.io.IOException;
import java.net.URI;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class HDFSDao {
    private final Log log = LogFactory.getLog(HDFSDao.class);

    // HDFS access address
    private static final String HDFS_PATH = "hdfs://master:9000/user/hadoop";

    // HDFS path
    private String hdfsPath;
    // Hadoop configuration
    private Configuration conf;

    public HDFSDao(Configuration conf) {
        this(HDFS_PATH, conf);
    }

    public HDFSDao(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    // Load the Hadoop configuration files
    public static JobConf getConfig() {
        JobConf conf = new JobConf(HDFSDao.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("classpath:/hadoop/core-site.xml");
        conf.addResource("classpath:/hadoop/hdfs-site.xml");
        conf.addResource("classpath:/hadoop/mapred-site.xml");
        return conf;
    }

    // Create a folder under the root directory
    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    // List the contents of a folder
    public FileStatus[] ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        if (list != null)
            for (FileStatus f : list) {
                System.out.println(f.getPath().getName() + ", folder: "
                        + (f.isDir() ? "directory" : "file") + ", size: "
                        + f.getLen() / 1024 + "\n");
            }
        System.out.println("==========================================================");
        fs.close();
        return list;
    }

    // Copy a local file to HDFS; remote is a /user/<user>/... path on HDFS
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    // Delete a file or folder (recursive delete; clearer than the original
    // deleteOnExit, which only removed the path when the FileSystem closed)
    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.delete(path, true);
        System.out.println("Delete: " + folder);
        fs.close();
    }

    // Download a file from HDFS to the local file system
    public void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(path, new Path(local));
        System.out.println("download: from " + remote + " to " + local);
        fs.close();
    }

    public static void main(String[] args) throws IOException {
        JobConf conf = getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        hdfs.ls("hdfs://master:9000/user/hadoop");
    }
}
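
One caveat about this DAO: FileSystem.get returns a cached instance that is shared process-wide by default, and every method here closes it, which can break concurrent requests still holding the same instance. Below is a minimal sketch of a safer variant of ls, assuming a Hadoop version that provides FileSystem.newInstance (2.x and later) and Java 7 try-with-resources; it would drop into the HDFSDao class above:

// Sketch only: FileSystem.newInstance returns a private, non-cached
// instance, so closing it cannot invalidate another request's handle.
public FileStatus[] lsSafe(String folder) throws IOException {
    try (FileSystem fs = FileSystem.newInstance(URI.create(hdfsPath), conf)) {
        return fs.listStatus(new Path(folder));
    } // fs is closed here even if listStatus throws
}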

3. UploadServlet.java

package com.hadoop.yunpan.controller;

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;

import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

/**
 * Servlet implementation class UploadServlet
 */
//@WebServlet(name="UploadServlet",urlPatterns="/UploadServlet")
public class UploadServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;
    private final Log log = LogFactory.getLog(UploadServlet.class);
    private final int MAX_FILE_SIZE = 50 * 1024 * 1024; // 50 MB upload limit
    private final int MAX_MEM_SIZE = 50 * 1024 * 1024;  // 50 MB in-memory threshold

    private String fileUploadPath;

    public UploadServlet() {
        super();
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doPost(request, response);
    }

    /**
     * @see Servlet#init(ServletConfig)
     */
    public void init(ServletConfig config) throws ServletException {
        // When overriding Servlet's init method, always call the parent's init;
        // otherwise getServletContext() in service/doGet/doPost returns no
        // ServletContext and throws java.lang.NullPointerException.
        super.init(config);
        System.out.println("init UploadServlet");
        ServletContext context = getServletContext();
        this.fileUploadPath = context.getInitParameter("file-upload");
        System.out.println("source file path: " + fileUploadPath);
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        request.setCharacterEncoding("UTF-8");
        File file;
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        // Verify the content type of the upload
        String contentType = request.getContentType();
        if (contentType != null && contentType.indexOf("multipart/form-data") >= 0) {
            DiskFileItemFactory factory = new DiskFileItemFactory();
            // Maximum size kept in memory before spilling to disk
            factory.setSizeThreshold(MAX_MEM_SIZE);
            // Where data larger than the threshold is stored locally
            factory.setRepository(new File("/tmp"));
            // Create a new file upload handler
            ServletFileUpload upload = new ServletFileUpload(factory);
            // Maximum allowed upload size
            upload.setSizeMax(MAX_FILE_SIZE);
            try {
                // Parse the request into file items
                List<FileItem> fileList = upload.parseRequest(request);
                Iterator<FileItem> iterator = fileList.iterator();
                System.out.println("begin to upload file to tomcat server");
                while (iterator.hasNext()) {
                    FileItem item = iterator.next();
                    if (!item.isFormField()) {
                        // Strip any client-side (Windows) path from the file name
                        String fileName = item.getName();
                        String fn = fileName.substring(fileName.lastIndexOf("\\") + 1);
                        System.out.println(fn);
                        // Write the file to the local upload directory
                        file = new File(fileUploadPath, fn);
                        item.write(file);
                        System.out.println("upload file to tomcat server success!");
                        // Copy it from the local directory into HDFS
                        System.out.println("begin to upload file to hadoop hdfs");
                        String name = fileUploadPath + File.separator + fn;
                        System.out.println(name);
                        hdfs.copyFile(name, "/user/hadoop/" + fn);
                        System.out.println("upload file to hadoop hdfs success!");
                    }
                }
                FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
                request.setAttribute("list", list);
                request.getRequestDispatcher("index.jsp").forward(request, response);
            } catch (Exception ex) {
                System.out.println(ex.getMessage());
            }
        } else {
            System.out.println("No file uploaded");
        }
    }
}
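
The servlet stages each upload on the Tomcat host's disk (the file-upload directory) and then copies it into HDFS. Below is a sketch of an alternative, not the original project's code, that streams the upload straight into HDFS with no local staging file, using the standard FileSystem.create and org.apache.hadoop.io.IOUtils.copyBytes APIs; the class and method names are my own:

package com.hadoop.yunpan.model;

import java.io.InputStream;
import org.apache.commons.fileupload.FileItem;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Sketch only: write a commons-fileupload FileItem directly to HDFS,
// skipping the intermediate copy on the Tomcat server's disk.
public class HdfsStreamUpload {
    public static void streamToHdfs(FileItem item, FileSystem fs, String dst)
            throws Exception {
        try (InputStream in = item.getInputStream();
             FSDataOutputStream out = fs.create(new Path(dst), true)) {
            // 4 KB buffer; 'false' leaves closing to try-with-resources
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }
}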



4. DownloadServlet.java


package com.hadoop.yunpan.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

/**
 * Servlet implementation class DownloadServlet
 */
//@WebServlet("/DownloadServlet")
public class DownloadServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doPost(request, response);
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        String local = "/home/hadoop/Downloads";
        // Undo the container's ISO-8859-1 decoding of the GB2312-encoded parameter
        String filePath = new String(
                request.getParameter("filePath").getBytes("ISO-8859-1"), "GB2312");
        System.out.println(filePath);
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        hdfs.download(filePath, local);
        FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
        request.setAttribute("list", list);
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }
}
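
Note that "download" here means "copy to a fixed directory on the server" (/home/hadoop/Downloads), which only makes sense when the browser and Tomcat run on the same machine. Below is a sketch, again not the original code, that streams the HDFS file to the client through the HTTP response instead, using FileSystem.open and IOUtils.copyBytes; the class and method names are my own:

package com.hadoop.yunpan.model;

import java.io.OutputStream;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Sketch only: serve an HDFS file to the browser instead of copying
// it to a server-side directory.
public class HdfsStreamDownload {
    public static void streamToClient(FileSystem fs, String remote,
            HttpServletResponse response) throws Exception {
        Path path = new Path(remote);
        response.setContentType("application/octet-stream");
        // Tell the browser to save the file under its HDFS name
        response.setHeader("Content-Disposition",
                "attachment; filename=\"" + path.getName() + "\"");
        try (FSDataInputStream in = fs.open(path);
             OutputStream out = response.getOutputStream()) {
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }
}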

5. DeleteFileServlet.java


package com.hadoop.yunpan.controller;

import java.io.IOException;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;

import com.hadoop.yunpan.model.HDFSDao;

/**
 * Servlet implementation class DeleteFileServlet
 */
//@WebServlet(name="DeleteFileServlet",urlPatterns="/DeleteFileServlet")
public class DeleteFileServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doPost(request, response);
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // Undo the container's ISO-8859-1 decoding of the GB2312-encoded parameter
        String filePath = new String(
                request.getParameter("filePath").getBytes("ISO-8859-1"), "GB2312");
        JobConf conf = HDFSDao.getConfig();
        HDFSDao hdfs = new HDFSDao(conf);
        hdfs.rmr(filePath);
        System.out.println("=====" + filePath + "=====");
        FileStatus[] list = hdfs.ls("hdfs://master:9000/user/hadoop");
        request.setAttribute("list", list);
        request.getRequestDispatcher("index.jsp").forward(request, response);
    }
}

6. web.xml configuration

<?xml version="1.0" encoding="UTF-8"?>
<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://java.sun.com/xml/ns/javaee"
xmlns:web="http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd"
xsi:schemaLocation="http://java.sun.com/xml/ns/javaee 
http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd" id="WebApp_ID" version="3.0">
 <display-name>webtest1</display-name>
 <welcome-file-list>
 <welcome-file>index.html</welcome-file>
 <welcome-file>index.htm</welcome-file>
 <welcome-file>index.jsp</welcome-file>
 <welcome-file>default.html</welcome-file>
 <welcome-file>default.htm</welcome-file>
 <welcome-file>default.jsp</welcome-file>
 </welcome-file-list>
 <context-param>
 <description>Location to store uploaded file</description>
 <param-name>file-upload</param-name>
 <param-value>/home/hadoop/tmp</param-value>
 </context-param>
 <servlet>
 <description></description>
 <display-name>UploadServlet</display-name>
 <servlet-name>UploadServlet</servlet-name>
 <servlet-class>com.hadoop.yunpan.controller.UploadServlet</servlet-class>
 </servlet>
 <servlet-mapping>
 <servlet-name>UploadServlet</servlet-name>
 <url-pattern>/UploadServlet</url-pattern>
 </servlet-mapping>
 
 <servlet>
 <description></description>
 <display-name>DeleteFileServlet</display-name>
 <servlet-name>DeleteFileServlet</servlet-name>
 <servlet-class>com.hadoop.yunpan.controller.DeleteFileServlet</servlet-class>
 </servlet>
 <servlet-mapping>
 <servlet-name>DeleteFileServlet</servlet-name>
 <url-pattern>/DeleteFileServlet</url-pattern>
 </servlet-mapping>
 
 <servlet>
 <description></description>
 <display-name>DownloadServlet</display-name>
 <servlet-name>DownloadServlet</servlet-name>
 <servlet-class>com.hadoop.yunpan.controller.DownloadServlet</servlet-class>
 </servlet>
 <servlet-mapping>
 <servlet-name>DownloadServlet</servlet-name>
 <url-pattern>/DownloadServlet</url-pattern>
 </servlet-mapping>
</web-app>

When starting the Tomcat server, because all of Hadoop's jars had earlier been dropped into WEB-INF/lib, the project fails to start with:

java.lang.NoSuchMethodError: org.eclipse.jdt.internal.compiler.CompilationResult.getProblems()

After some digging, I found the cause: the jetty, servlet, and other third-party jars that the Hadoop jars depend on conflict with the ones in the current project.

Solution 1:
If, as in this project, you added the jars to lib by hand, remove the following ones. This is what I did, and the project runs once they are gone (the jars carry specific version numbers that I have not written out for all of them, but the name prefixes are the same):
  jersey-server-1.9.jar
  jasper-runtime-5.5.23.jar
  jasper-compiler-5.5.23.jar
  servlet-api.jar
  jsp-api.jar
  jetty-util.jar
  jetty.jar

Solution 2:

If the project is managed with Maven, adding the following exclusions in pom.xml should remove the conflicting dependencies, though I have not tried this myself:
<dependency>
  <groupId>org.apache.hadoop</groupId>
  <artifactId>hadoop-common</artifactId>
  <version>${hadoop.version}</version>
    <exclusions>
    <exclusion>
        <groupId>org.mortbay.jetty</groupId>
        <artifactId>jetty</artifactId>
    </exclusion>
    <exclusion>
        <groupId>org.mortbay.jetty</groupId>
        <artifactId>jetty-util</artifactId>
    </exclusion>
    <exclusion>
        <groupId>org.mortbay.jetty</groupId>
        <artifactId>jsp-2.1</artifactId>
    </exclusion>
    <exclusion>
        <groupId>org.mortbay.jetty</groupId>
        <artifactId>jsp-api-2.1</artifactId>
    </exclusion>
    <exclusion>
        <groupId>org.mortbay.jetty</groupId>
        <artifactId>servlet-api-2.1</artifactId>
    </exclusion>
    <exclusion>
        <groupId>javax.servlet</groupId>
        <artifactId>servlet-api</artifactId>
    </exclusion>
    <exclusion>
        <groupId>javax.servlet.jsp</groupId>
        <artifactId>jsp-api</artifactId>
    </exclusion>
    <exclusion>
        <groupId>tomcat</groupId>
        <artifactId>jasper-compiler</artifactId>
    </exclusion>
    <exclusion>
        <groupId>tomcat</groupId>
        <artifactId>jasper-runtime</artifactId>
    </exclusion>
  </exclusions>
</dependency>

Finally

The finished result looks like this:

(screenshot of the finished cloud disk page)

Source bundle:
Link: https://pan.baidu.com/s/1v4eaMFRgqm1LYt-IwgzGnA
Extraction code: qzbs
