// Tenant namespace read from the container environment; used to address the
// per-tenant data-integration service instance.
String tenantName = System.getenv("OPENPALETTE_NAMESPACE");

// Logical service name of the data-integration microservice.
String dataIntegrationServiceName = "dataintegration-ms";

/**
 * Uploads the given file to the data-integration service over HTTP multipart
 * and persists a {@code DataImportRecord} describing the outcome.
 *
 * <p>The outcome record is inserted in a {@code finally} block, so it is
 * written whether the upload succeeds, returns a non-200 status, or throws.
 *
 * @param file    the file to upload (its name and size are stored on the record)
 * @param request import parameters (db/table/partition/cluster/spark source)
 * @throws Exception if the HTTP call fails; the exception is recorded and rethrown
 */
@Override
public void importData(MultipartFile file, DataImportRecordDTO request) throws Exception {
    log.info("数据导入开始。");
    log.info("数据导入参数:" + JSONUtil.toJson(request));

    // Build the target URL; every query parameter is URL-encoded.
    String url = "http://" + dataIntegrationServiceName + "-" + tenantName
            + ":10258/dataintegration/file/importdata"
            + "?dbName=" + URLEncoder.encode(request.getDbName(), "utf-8")
            + "&tableName=" + URLEncoder.encode(request.getTableName(), "utf-8")
            + "&partitions=" + URLEncoder.encode(request.getPartitionConfig(), "utf-8")
            + "&isOverWrite=" + URLEncoder.encode(request.getOverwrite().toString(), "utf-8")
            + "&clusterName=" + URLEncoder.encode(request.getClusterName(), "utf-8")
            + "&sparkSourceName=" + URLEncoder.encode(request.getSparkSourceName(), "utf-8");
    log.info("URL: " + url);

    // Prepare the outcome record up front so it can be persisted in finally.
    DataImportRecord record = request.toDataImportRecord();
    record.setFileName(file.getOriginalFilename());
    record.setFileSize(file.getSize());
    String errorMsg = "";

    // try-with-resources: CloseableHttpClient was previously never closed (leak).
    try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
        HttpPost httpPost = new HttpPost(url);
        // NOTE(review): hard-coded credential — should be moved to secure
        // configuration; left unchanged here to preserve behavior.
        httpPost.addHeader("Authorization", "administrator:SdzV4wj1ufh3+X1PgIQXj7ld9gc=");
        HttpEntity entity = MultipartEntityBuilder.create()
                .setCharset(Consts.UTF_8)
                .addBinaryBody("file", file.getInputStream(),
                        ContentType.MULTIPART_FORM_DATA, file.getOriginalFilename())
                .build();
        httpPost.setEntity(entity);

        // Execute the upload and classify the result by HTTP status.
        HttpResponse response = httpClient.execute(httpPost);
        log.info("接口返回信息:" + response);
        if (response.getStatusLine().getStatusCode() != 200) {
            log.error("数据导入失败!");
            errorMsg = response.toString();
            record.setIsSuccess(false);
        } else {
            log.info("数据导入成功。");
            record.setIsSuccess(true);
        }
    } catch (Exception e) {
        log.error("数据导入失败!");
        record.setIsSuccess(false);
        // Capture message plus stack trace for the persisted failure record.
        errorMsg = e.getMessage() + "\n" + Arrays.stream(e.getStackTrace())
                .map(StackTraceElement::toString)
                .collect(Collectors.joining("\n"));
        throw e;
    } finally {
        // Persist the import record regardless of the outcome above.
        log.info("插入数据导入记录。");
        record.setFailMessage(errorMsg);
        record.setCreateTime(new Date());
        record.setId(SnowflakeIdWorker.newId());
        dataImportDao.insert(record);
    }
}

/**
 * Resolves the environment details (database, cluster, Spark source) needed
 * to import data into the given table in the given environment.
 *
 * @param tableId ID of the target table
 * @param envType environment name, must match an {@code EnvironmentEnum} constant
 * @return DTO carrying the database name, cluster name, and Spark source name
 * @throws BusinessException if the table or its environment info cannot be found
 * @throws Exception propagated from lookups
 */
@Override
public DataImportEnvironmentInfoDTO getDataImportEnvironmentInfo(Long tableId, String envType) throws Exception {
    // Look up the table; fail fast with a parameter error if it is missing.
    Table table = tableDao.findByID(tableId);
    if (table == null) {
        throw new BusinessException(BusinessExceptionEnums.PARAM_ERROR.getCode(),
                "未找到表信息,表ID:" + tableId);
    }

    // Resolve the environment bound to the table's project.
    EnvironmentInfo envInfo = getEnvironmentInfo(
            table.getBusinessInformation().getProjectID(), EnvironmentEnum.valueOf(envType));
    if (envInfo == null) {
        throw new BusinessException(BusinessExceptionEnums.PARAM_ERROR.getCode(),
                "未找到环境信息,项目ID:" + table.getBusinessInformation().getProjectID());
    }

    DataImportEnvironmentInfoDTO data = new DataImportEnvironmentInfoDTO();
    data.setDbName(table.getDatabase());
    data.setClusterName(envInfo.getClusterName());
    data.setSparkSourceName(envInfo.getSourceData().getSourceName());
    return data;
}
关于使用 HTTP 上传文件到 HDFS 文件系统
最新推荐文章于 2022-12-01 20:28:26 发布