A Brief Look at Data Integration
Generally speaking, data integration means taking the customer data you have collected, converting it into the data format required by the platform's specification, and uploading it. Every platform has its own format requirements. So how do you get integration done quickly, with as little repeated work and as little rework as possible? I think it breaks down into a few steps. Whatever you do, don't bury yourself head-first in the interface document and start mapping fields blindly; your interface is likely to fall apart later, because you have to allow for the platform changing its spec and for your own data being incomplete or not quite matching.
1. Skim the integration document and the customer's requirements first (this is mandatory; skip it and enjoy rewriting everything later). Be especially alert to dates: integrations are often done for an acceptance check, and the data within a particular date range can matter a great deal.
2. Get familiar with the customer data you have collected, and map each of its fields to the corresponding field in the integration document. Pay particular attention to the code fields; this is where failed uploads usually trip up.
3. Once the mapping between your database fields and the document is clear, still don't start coding. There is one more step you must not skip: check the database for garbage data. As everyone knows, once a dataset grows there is always garbage data that hurts the accuracy of what gets uploaded. Yes, you can keep a log and record the upload success rate, but digging through logs and pinning down the root cause eats a lot of time. Rather than cleaning up afterwards, prevent it up front: filter the garbage data out before uploading so the upload stays accurate (see the sketch right after this list).
4. Only after all of the above is done can you sit down in peace and write the interface code that uploads the data.
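A minimal sketch of what the pre-filtering in step 3 can look like on the Java side, reusing the entity and the commons-lang StringUtils that appear in the example below. The helper name dropGarbage and the concrete rules are mine; what counts as garbage depends entirely on your own schema.
// sketch only: drop records that would be rejected by the platform anyway
private List<MeatTzInHz> dropGarbage(List<MeatTzInHz> rows) {
    return rows.stream()
            .filter(r -> StringUtils.isNotBlank(r.getSlaughterCode()))  // no mainbody code -> garbage
            .filter(r -> StringUtils.isNotBlank(r.getInstockDate()))    // no instock date -> garbage
            .collect(Collectors.toList());                              // java.util.stream.Collectors
}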
Here is an example (backend stack: MyBatis + MVC).
Integration approach: export yesterday's data to a txt file, then upload it to the platform.
Backend SQL
<select id="getMeatTzInHz" resultType="net.***.**.platform.meat.entity.MeatTzInHz">
SELECT
instock.slaughterCode,
instock.instockDate,
SUM(instock.checkInstockNum) AS checkInstockNum,
AVG(instock.price) AS price,
SUM(instock.quantityAndWeight) AS quantityAndWeight,
SUM(instock.transportDeathNum) AS transportDeathNum,
instock.produceCode
-- NOTE: the inner query in this excerpt does not actually select transportDeathNum or produceCode;
-- add them there (from whatever columns hold them in your schema) or this outer query will not run
FROM (
SELECT
COALESCE(trace_mainbody_employee.mainbody_record_info :: JSON ->> 'meatCommerceCode', '') AS slaughterCode,
--mainbody (slaughterhouse) code
COALESCE(trace_mainbody_employee.mainbody_name, '') AS slaughterName,
--mainbody (slaughterhouse) name
TO_CHAR(trace_instock.instock_date,'yyyyMMdd') AS instockDate,
--instock (arrival) date
COALESCE(substr(trace_mainbody_provider.mainbody_record_info :: JSON ->> 'commerceCode',1,9), '') AS ownerCode,
--owner code
COALESCE(trace_mainbody_provider.mainbody_name, '') AS ownerName,
--owner name
COALESCE(trace_batch.inspection_info :: JSON ->> 'CitypfTzPigCertCode', '') AS checkCertCode,
--pig origin quarantine certificate number, taken from the JSON key CitypfTzPigCertCode
CAST(COALESCE(trace_batch.batch_other_info :: JSON ->> 'detectionQuantity', '0') AS int) AS checkInstockNum,
--instock quantity on the quarantine certificate (the key named in the platform doc is CitypfTzQuarantineCount)
COALESCE(trace_batch.batch_price, 0) AS price,
--batch unit price
COALESCE(trace_batch.batch_quantity, 0) AS quantityAndWeight
FROM
t_trace_instock trace_instock --instock base table
INNER JOIN t_trace_instock_detail trace_instock_detail
ON (
trace_instock.ID = trace_instock_detail.instock_id
AND trace_instock_detail.del_flg = '0'
)
INNER JOIN t_trace_mainbody trace_mainbody_employee --enterprise (mainbody) table
ON (
trace_mainbody_employee.trace_mainbody_code = trace_instock.trace_mainbody_code
AND trace_mainbody_employee.mainbody_type in ('101')
AND trace_mainbody_employee.del_flg = '0'
AND trace_mainbody_employee.mainbody_record_info :: JSON ->> 'meatCommerceCode' != ''
<if test="division != null and division != ''">
AND split_part(trace_mainbody_employee.mainbody_record_info :: JSON ->> 'divisionName',',',2) like ('%' || #{division,jdbcType = VARCHAR} || '%')
</if>
<if test="mainBodyCode != null and mainBodyCode != ''">
AND trace_mainbody_employee.mainbody_code = #{mainBodyCode,jdbcType = VARCHAR}
</if>
)
LEFT JOIN t_trace_mainbody trace_mainbody_provider --supplier table
ON (
trace_mainbody_provider.mainbody_code = trace_instock.supplier_code
AND trace_mainbody_provider.mainbody_type = '102'
AND trace_mainbody_provider.del_flg = '0'
)
INNER JOIN t_trace_batch trace_batch --batch table
ON (
trace_batch.batch_code = trace_instock_detail.batch_code
AND trace_batch.del_flg = '0'
)
WHERE
trace_mainbody_employee.mainbody_record_info :: JSON ->> 'nodeType' = '101'
AND trace_instock.del_flg = '0'
<![CDATA[
AND CAST(trace_instock.sys_reg_tmsp AS DATE) >= CAST(#{startTagSysRegTmsp,jdbcType = VARCHAR} AS DATE)
AND CAST(trace_instock.sys_reg_tmsp AS DATE) <= CAST(#{endTagSysRegTmsp,jdbcType = VARCHAR} AS DATE)
]]>
) instock
GROUP BY instock.slaughterCode, instock.instockDate, produceCode
</select>
Entity
public class MeatTzInHz implements Serializable {
/**
 * Slaughterhouse code
 */
protected String slaughterCode;
/**
 * Instock (arrival) date
 */
protected String instockDate;
/**
 * Instock quantity on the quarantine certificate
 */
protected int checkInstockNum;
/**
 * Purchase price
 */
protected double price;
/**
 * Actual quantity and weight
 */
protected double quantityAndWeight;
/**
 * Deaths in transit
 */
protected int transportDeathNum;
/**
 * Origin (production place) code
 */
protected String produceCode;
@Override
public String toString() {
return FileEnum.DIVIDE_COMMA.getSymbol() + slaughterCode +
FileEnum.DIVIDE_COMMA.getSymbol() + instockDate +
FileEnum.DIVIDE_COMMA.getSymbol() + checkInstockNum +
FileEnum.DIVIDE_COMMA.getSymbol() + price +
FileEnum.DIVIDE_COMMA.getSymbol() + quantityAndWeight +
FileEnum.DIVIDE_COMMA.getSymbol() + transportDeathNum +
FileEnum.DIVIDE_COMMA.getSymbol() + produceCode + FileEnum.ENTER_LINE_BREAK.getSymbol();
}
}
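A side note on this entity: the export service further down calls getSlaughterCode() and getInstockDate(), and MyBatis needs a way to populate the fields, so in practice the class also needs getters and setters, which the excerpt above omits. The least noisy option is Lombok, the same as the Identity class later in this post; a sketch:
@Data   // Lombok generates the getters/setters the mapper and the export code rely on
public class MeatTzInHz implements Serializable {
    // ... the fields and toString() shown above
}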
Mapper
List<MeatTzInHz> getMeatTzInHz(DataCondition dataCondition);
Service
public List<MeatTzInHz> getMeatTzInHz(DataCondition dataCondition) {
List<MeatTzInHz> meatTzInHzList = meatMapper.getMeatTzInHz(dataCondition);
//guard against an empty result
if(CollectionUtils.isEmpty(meatTzInHzList)) {
return new ArrayList<>();
}
return meatTzInHzList;
}
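The DataCondition object passed around here is never shown in the post. Below is a minimal sketch that is consistent with the setters used in the scheduled task further down and with the parameters referenced in the SQL above; it is an assumption, so adjust it to the real class.
@Data
public class DataCondition implements Serializable {
    private String division;            // bound to the divisionName LIKE filter
    private String mainBodyCode;        // bound to the mainbody_code filter
    private String startTagSysRegTmsp;  // start of the sys_reg_tmsp window
    private String endTagSysRegTmsp;    // end of the sys_reg_tmsp window
}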
Export service (the method name is up to you; only the method body is shown): ExportService
List<MeatTzInHz> meatTzInHzList = meatService.getMeatTzInHz(dataCondition);
meatTzInHzList.stream()
.filter(meatTzInHz -> StringUtils.isNotBlank(meatTzInHz.getSlaughterCode()))
.forEach(meatTzInHz -> {
String fileName = txtFileUtil.getFileName(FileEnum.POSTFIX_MEAT_TZ_IN_HZ_TXT.getSymbol(), meatTzInHz.getSlaughterCode(), meatTzInHz.getInstockDate());
try {
txtFileUtil.writeFileToDateDirectory(meatTzInHz.toString(), fileName);
} catch (IOException e) {
e.printStackTrace();
}
});
Scheduled task
Much the same as everything else.
@Autowired
private ExportService exportService;
/**
* second minute hour day-of-month month day-of-week
*/
@Scheduled(cron = "0 30 4,11 * * ? ")
private void grouppurchasing() {
List<MainBody> mainBodyList = nodeService.getMainBodyByNodeType(2);
if(CollectionUtils.isNotEmpty(mainBodyList)){
logger.info("生成数据文件开始!");
mainBodyList.forEach(mainBody -> {
logger.info("生成企业:{} 数据文件开始!", mainBody.getMainbodyName());
DataCondition dataCondition = new DataCondition();
dataCondition.setDivision("上海");
dataCondition.setStartTagSysRegTmsp(DateUtil.getYesterdayByDefultFormat());
dataCondition.setEndTagSysRegTmsp(DateUtil.getNowByDefultFormat());
//dataCondition.setNodeTypeList(Arrays.asList(PlatformConstant.NODE_TYPE_110));
dataCondition.setMainBodyCode(mainBody.getMainbodyCode());
baseNodeInfo(dataCondition ,true);
exportService.exportGpToTxt(dataCondition);
logger.info("生成企业:{} 数据文件结束!", mainBody.getMainbodyName());
});
logger.info("生成数据文件结束!");
}
}
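One thing the post leaves out: @Scheduled only fires if scheduling is enabled somewhere in the Spring configuration. The standard way is a small config class like this:
@Configuration
@EnableScheduling   // without this the @Scheduled cron methods above never run
public class SchedulingConfig {
}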
Utility classes: FileEnum and TxtFileUtil
public enum FileEnum {
POSTFIX_MEAT_TZ_IN_HZ_TXT("{0}_{1}_T2.TXT"),
DIVIDE_COMMA(","),          // field separator used by MeatTzInHz.toString() (value assumed)
ENTER_LINE_BREAK("\r\n");   // line terminator used by MeatTzInHz.toString() (value assumed)
private String symbol;
FileEnum(String symbol) {
this.symbol = symbol;
}
public String getSymbol() {
return symbol;
}
}
public String getFileName(String pattern, Object... arguments) {
if (arguments == null || arguments.length == 0) {
return pattern;
}
return MessageFormat.format(pattern, arguments);
}
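For what it's worth, with the pattern above a call like the one in the export service produces a name of the form <slaughterCode>_<date>_T2.TXT; the values below are made up purely for illustration.
String name = txtFileUtil.getFileName(FileEnum.POSTFIX_MEAT_TZ_IN_HZ_TXT.getSymbol(), "310101001", "20190101");
// -> "310101001_20190101_T2.TXT"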
/**
* Write one line into a file under yesterday's date directory
*
* @param line
* @param fileName
* @throws IOException
*/
public void writeFileToDateDirectory(String line, String fileName) throws IOException {
String dateDir = DateUtil.getYesterdayByDefultFormat() + "/";
String dirFile = this.fileOutputDirectory + dateDir + fileName;
byte[] buff = line.getBytes(Charset.forName("GBK"));
FileOutputStream out = fileMap.get(dirFile);
if (out == null) {
File directoryFile = new File(this.fileOutputDirectory + dateDir);
File file = new File(dirFile);
if (!directoryFile.exists()) {
directoryFile.mkdirs();
}
// create the file if it doesn't exist
if (!file.exists()) {
file.createNewFile();
}
// append = false: the file is overwritten when the stream is first opened; later writes go through the cached stream and therefore append
out = new FileOutputStream(file, false);
fileMap.put(dirFile, out);
}
out.write(buff);
}
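Note that writeFileToDateDirectory caches the FileOutputStream instances in fileMap (a field of the util class, not shown) and never closes them. A small helper like the following, my addition rather than part of the original, called once the whole export has finished keeps the files flushed and the handles released:
public void closeAll() {
    fileMap.values().forEach(out -> {
        try {
            out.flush();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    });
    fileMap.clear();
}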
DateUtil
private static Date add(Date date, int calendarField, int amount) {
if (date == null) {
throw new IllegalArgumentException("The date must not be null");
} else {
Calendar c = Calendar.getInstance();
c.setTime(date);
c.add(calendarField, amount);
return c.getTime();
}
}
/**
* Add days to a date
*
* @param date
* @param amount
* @return
*/
public static Date addDays(Date date, int amount) {
return add(date, Calendar.DAY_OF_MONTH, amount);
}
/**
* Get yesterday's date
* @return
*/
public static Date getYesterday() {
return addDays(getNow(), -1);
}
/**
* Format a Date into a string
*
* @param date
* @param format
* @return
*/
public static String convert2String(Date date, String format) {
SimpleDateFormat formater = new SimpleDateFormat(format);
try {
return formater.format(date);
} catch (Exception e) {
return null;
}
}
/**
* Get yesterday's date formatted as yyyyMMdd
* @return
*/
public static String getYesterdayByDefultFormat() {
return convert2String(getYesterday(), DATE_FORMAT_YYYYMMDD);
}
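If you are on Java 8 or later, the same "yesterday as yyyyMMdd" can also be had from java.time without the Calendar/SimpleDateFormat plumbing:
String yesterday = LocalDate.now().minusDays(1)
        .format(DateTimeFormatter.ofPattern("yyyyMMdd"));   // java.time.LocalDate / java.time.format.DateTimeFormatter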
At this point you should be able to generate the txt files. One step remains: the upload.
The platform requires the files to be packed into a zip and uploaded via a webservice.
The functional code first, then the utility classes.
/**
* Upload the zip file via webservice (once a day)
* 6:30 12:30
*/
@Scheduled(cron = "0 30 6,12 * * ? ")
private void uploadZipByWebservice() {
//pack yesterday's files into a zip
String zipPath = "/sh";
String zipName = DateUtil.getYesterdayByDefultFormat();
ZipUtils.createZipFile(zipPath,zipName);
//file info for the upload (note: make sure the directory the zip is written to above and the /data/ path it is read from below point at the same place in your environment)
String fileName = "sh" + zipName + ".zip";
Identity identity = new Identity();
ReqEntity<Identity> reqEntity = new ReqEntity<>();
identity.setFileName(fileName);
identity.setFileCreateTime(new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
File file = new File("/data/"+ fileName);
identity.setFileSize(String.valueOf(file.length()));
reqEntity.setIdentity(identity);
reqUtil.sendXml(reqEntity);
}
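One guard worth adding to this task (my suggestion, not part of the original), placed right before the reqUtil.sendXml call: skip the upload when the zip is missing or empty.
if (!file.exists() || file.length() == 0) {
    logger.warn("zip file {} is missing or empty, skipping upload", fileName);
    return;
}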
It looks concise, but it relies on three utility classes and two entity classes.
Entity classes
@Data
//serialize fields only, ignore getters
@JsonAutoDetect(fieldVisibility=JsonAutoDetect.Visibility.ANY, getterVisibility=JsonAutoDetect.Visibility.NONE)
public class Identity implements Serializable {
private String UserName;
private String Password;
private String FileName;
private String FileSize;
private String FileCreateTime;
}
Parent class
@Data
//serialize fields only, ignore getters
@JsonAutoDetect(fieldVisibility=JsonAutoDetect.Visibility.ANY, getterVisibility=JsonAutoDetect.Visibility.NONE)
public class ReqEntity<T> implements Serializable {
private String pfileType;
private String pbuffer;
private String usercode;
private String userPass;
private T identity;
private String pErrmasg;
}
It really just defines a few fields, nothing more. =.=
Now for the utility classes.
Creating the zip file: ZipUtils
/**
* Create the zip file
*/
public static void createZipFile(String saveFilePath ,String zipName) {
FileOutputStream fileOutputStream = null;
try {
File zipFile = new File(saveFilePath + zipName.replaceAll("(.*?)(/*)$","$1") + ".zip");
fileOutputStream = new FileOutputStream(zipFile);
ZipUtils.toZip(saveFilePath + "/"+zipName+"/", fileOutputStream, false);
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Compress a directory into a ZIP (variant 1)
 *
 * @param srcDir           directory to compress
 * @param out              output stream for the zip file
 * @param KeepDirStructure whether to keep the original directory structure; true: keep it;
 *                         false: put every file at the root of the zip (note: without the
 *                         structure, files with the same name will make the compression fail)
 * @throws RuntimeException thrown when compression fails
 */
public static void toZip(String srcDir, OutputStream out, boolean KeepDirStructure)
throws RuntimeException {
long start = System.currentTimeMillis();
ZipOutputStream zos = null;
try {
zos = new ZipOutputStream(out);
File sourceFile = new File(srcDir);
compress(sourceFile, zos, sourceFile.getName(), KeepDirStructure);
long end = System.currentTimeMillis();
logger.info("压缩完成,耗时:" + (end - start) + " ms");
} catch (Exception e) {
throw new RuntimeException("zip error from ZipUtils", e);
} finally {
if (zos != null) {
try {
zos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
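Used on its own, toZip looks like this; the paths are illustrative, and toZip closes the stream itself through its ZipOutputStream.
try (FileOutputStream out = new FileOutputStream("/data/sh20190101.zip")) {
    ZipUtils.toZip("/data/sh/20190101/", out, true);   // true = keep the directory structure
} catch (IOException e) {
    e.printStackTrace();
}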
/**
 * Recursive compression
 *
 * @param sourceFile       source file or directory
 * @param zos              zip output stream
 * @param name             entry name inside the zip
 * @param KeepDirStructure whether to keep the original directory structure; true: keep it;
 *                         false: put every file at the root of the zip (note: without the
 *                         structure, files with the same name will make the compression fail)
 * @throws Exception
 */
private static void compress(File sourceFile, ZipOutputStream zos, String name,
boolean KeepDirStructure) throws Exception {
byte[] buf = new byte[BUFFER_SIZE];
if (sourceFile.isFile()) {
// add a zip entry; the name passed to the constructor becomes the entry's file name inside the zip
zos.putNextEntry(new ZipEntry(name));
// copy the file into the zip output stream
int len;
FileInputStream in = new FileInputStream(sourceFile);
while ((len = in.read(buf)) != -1) {
zos.write(buf, 0, len);
}
// Complete the entry
zos.closeEntry();
in.close();
} else {
File[] listFiles = sourceFile.listFiles();
if (listFiles == null || listFiles.length == 0) {
// when keeping the directory structure, empty directories need an entry too
if (KeepDirStructure) {
// write an entry for the empty directory
zos.putNextEntry(new ZipEntry(name + "/"));
// no files, nothing to copy
zos.closeEntry();
}
} else {
for (File file : listFiles) {
// keep the original directory structure or not
if (KeepDirStructure) {
// note: prefix file.getName() with the parent directory name plus a slash,
// otherwise the directory structure is lost and every file ends up at the root of the zip
compress(file, zos, name + "/" + file.getName(), KeepDirStructure);
} else {
compress(file, zos, file.getName(), KeepDirStructure);
}
}
}
}
}
/**
 * Read a file into a byte[] given its path
 *
 * @param filePath file path
 * @return byte array
 * @throws IOException
 */
public static byte[] getFileToByteArray(String filePath) throws IOException {
File file = new File(filePath);
long fileSize = file.length();
if (fileSize > Integer.MAX_VALUE) {
return null;
}
FileInputStream fileInputStream = new FileInputStream(file);
byte[] buffer = new byte[(int) fileSize];
int offset = 0;
int numRead = 0;
while (offset < buffer.length && (numRead = fileInputStream.read(buffer, offset, buffer.length - offset)) >= 0) {
offset += numRead;
}
// make sure the whole file was read
if (offset != buffer.length) {
logger.error("Could not completely read file : {}" , file.getName());
throw new IOException("Could not completely read file " + file.getName());
}
fileInputStream.close();
return buffer;
}
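If you are on Java 7 or later, this whole method can also be replaced by the JDK one-liner:
byte[] buffer = Files.readAllBytes(Paths.get(filePath));   // java.nio.file.Files / java.nio.file.Paths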
And the upload utility class: ReqUtil
@Value("${webservice.url}")
private String webUrl;
/**
* Upload by posting a hand-built SOAP XML request
* @param reqEntity
* @return
*/
public String sendXml( ReqEntity<Identity> reqEntity) {
URL url = null;
String fileName = reqEntity.getIdentity().getFileName();
String fileSize = reqEntity.getIdentity().getFileSize();
String createTime = reqEntity.getIdentity().getFileCreateTime();
String fileStr = "";
String responsMess = "";
try {
url = new URL(webUrl);
reqUtilLogger.info("requestUrl:" + url);
HttpURLConnection con = (HttpURLConnection) url.openConnection();
byte[] zipOutput = ZipUtils.getFileToByteArray("/data/"+ fileName);
fileStr = java.util.Base64.getEncoder().encodeToString(zipOutput);   // java.util.Base64 instead of the JDK-internal sun.misc.BASE64Encoder
//build the SOAP envelope
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
"<soap:Envelope xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\">\n" +
" <soap:Header>\n" +
" <Identify xmlns=\"FjUpdate1\">\n" +
" <UserName>"+userName+"</UserName>\n" +
" <Password>"+password+"</Password>\n" +
" <FileName>"+fileName+"</FileName>\n" +
" <FileSize>"+fileSize+"</FileSize>\n" +
" <FileCreateTime>"+createTime+"</FileCreateTime>\n" +
" </Identify>\n" +
" </soap:Header>\n" +
" <soap:Body>\n" +
" <UpFile xmlns=\"FjUpdate1\">\n" +
" <pFileType>" + fileName.split("\\.")[0] + "</pFileType>\n" +
" <pBuffer>"+fileStr+"</pBuffer>\n" +
" <usercode>"+userName+"</usercode>\n" +
" <userpass>"+password+"</userpass>\n" +
" </UpFile>\n" +
" </soap:Body>\n" +
"</soap:Envelope>\n");
String xmlStr = sb.toString();
System.out.println(xmlStr);
//set the request headers
con.setRequestMethod("POST");
con.setRequestProperty("content-type", "text/xml; charset=utf-8");
con.setRequestProperty("Content-Length", String.valueOf(xmlStr.getBytes().length));
// con.setRequestProperty("soapActionString", soapActionString);
//required for a POST request
con.setDoOutput(true);
con.setDoInput(true);
//write the XML into the request body
OutputStream ops = con.getOutputStream();
ops.write(xmlStr.getBytes());
ops.flush();
ops.close();
//read the response
InputStream ips = con.getInputStream();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buf = new byte[1024];
int length = 0;
while( (length = ips.read(buf)) != -1){
baos.write(buf, 0, length);
baos.flush();
}
byte[] responsData = baos.toByteArray();
baos.close();
//decode the response body
responsMess = new String(responsData,"utf-8");
reqUtilLogger.info("response: " + responsMess);
reqUtilLogger.info(String.valueOf(con.getResponseCode()));
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return responsMess;
}
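One addition I would make to sendXml (not in the original): set connect and read timeouts right after openConnection, otherwise a hung platform endpoint blocks the scheduler thread indefinitely.
con.setConnectTimeout(10_000);   // 10s to establish the connection
con.setReadTimeout(60_000);      // 60s to wait for the platform's response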
I won't paste the @Value configuration parameters here; those who know will know how to configure them.
Just define userName and password yourself.
And with that, we're done. (^-^)V
Many people will ask: with this many classes, doesn't the Maven build need a lot of dependencies?
I won't list the basics like MyBatis and the Spring Framework, everyone knows those; beyond that it's just the usual IO dependencies, nothing special. Sort them out yourself.