最近需要用 Spark 进行文件处理，需要把数据提交到 Spark 上，就想着通过 HTTP 把文件上传到 HDFS 上，再进行数据计算。
public class HDFSCommon {

    /**
     * Copies a local file into HDFS under {@code url + "/test"}.
     *
     * @param config Hadoop configuration used to resolve the file system
     * @param url    HDFS base URI, e.g. {@code hdfs://host:9000}
     * @param file   local file to upload
     * @throws IllegalStateException if the upload fails; the underlying
     *         exception is preserved as the cause
     */
    public static void copyfileToHdfs(Configuration config, String url, File file) {
        try {
            // FileSystem.get returns a cached, shared instance for this URI —
            // deliberately not closed here, or other users of the same
            // FileSystem would be broken.
            FileSystem fs = FileSystem.get(URI.create(url), config);
            Path src = new Path(file.getPath());
            // Target directory inside HDFS.
            // NOTE(review): "/test" is hard-coded — consider making the
            // destination path a parameter.
            Path dst = new Path(url + "/test");
            fs.copyFromLocalFile(src, dst);
        } catch (Exception e) {
            // Do not swallow the failure: surface it to the caller with
            // context and the original cause attached.
            throw new IllegalStateException(
                    "Failed to copy " + file + " to HDFS at " + url, e);
        }
    }
}
public class CommonUtil {

    /**
     * Writes the entire input stream to the given file.
     * Both streams are closed on every path, including failure.
     *
     * @param ins  source stream; always closed on return
     * @param file destination file; overwritten if it already exists
     * @throws IllegalStateException if reading or writing fails; the
     *         underlying I/O exception is preserved as the cause
     */
    public static void inputStreamToFile(InputStream ins, File file) {
        // try-with-resources closes both streams even when read/write throws;
        // the original leaked them on any exception.
        try (InputStream in = ins;
             OutputStream os = new FileOutputStream(file)) {
            byte[] buffer = new byte[8192];
            int bytesRead;
            while ((bytesRead = in.read(buffer, 0, buffer.length)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
        } catch (Exception e) {
            throw new IllegalStateException("Failed to write " + file, e);
        }
    }

    /**
     * Materializes an uploaded MultipartFile as a local {@link File} in the
     * working directory, named after the upload's original file name.
     *
     * @param file the uploaded file; may be null or empty
     * @return the local file, or null when the upload is null/empty or
     *         copying fails (callers treat null as "no file")
     */
    static public File MultipartFileToFile(MultipartFile file) {
        try {
            // Bug fix: the original compared the MultipartFile itself against
            // "" with equals(), which is always false; isEmpty() is the
            // intended emptiness check.
            if (file == null || file.isEmpty() || file.getSize() <= 0) {
                return null;
            }
            // Strip any client-supplied directory components to prevent path
            // traversal via a crafted original filename.
            String name = new File(file.getOriginalFilename()).getName();
            File f = new File(name);
            inputStreamToFile(file.getInputStream(), f);
            return f;
        } catch (Exception e) {
            // Best-effort conversion: log and signal failure with null.
            e.printStackTrace();
            return null;
        }
    }
}
@Controller
@CrossOrigin
public class FileController {

    // HDFS name-node URI.
    // NOTE(review): should come from application configuration, not code.
    private static final String HDFS_URL = "hdfs://10.0.0.194:9000";

    /**
     * Receives a file uploaded over HTTP and copies it into HDFS.
     * Silently returns when the upload is null or could not be converted.
     *
     * @param file the multipart upload
     */
    @PostMapping("OneController/getFileToHDFS")
    @ApiOperation("上传文件到HDFS") // fixed: original text described an unrelated "get address by id" endpoint
    public void getFileToHDFS(@RequestParam("file") MultipartFile file) {
        try {
            if (file == null) {
                return;
            }
            File local = CommonUtil.MultipartFileToFile(file);
            if (local == null) {
                // Empty upload or conversion failure — nothing to push.
                return;
            }
            try {
                HDFSCommon.copyfileToHdfs(new Configuration(), HDFS_URL, local);
            } finally {
                // Clean up the temporary local copy regardless of outcome;
                // the original left it behind on disk.
                local.delete();
            }
        } catch (Exception e) {
            // Boundary catch so the HTTP handler does not propagate.
            // NOTE(review): replace with proper logging and an error response.
            e.printStackTrace();
        }
    }
}