// --- Configuration & state --------------------------------------------------

/** Token identifying this system to the Kafka cluster. */
private String systemIdToken;
/** Kafka broker/registry URL. */
private String url;
/** Kafka cluster name. */
private String clusterName;
/** Topic whose waybill messages drive the comparison. */
private String topic;
/** Number of Kafka consumer threads (default 4). */
private int consumeThreadCount = 4;
/** HBase table (same name in both clusters) whose rows are compared. */
private String compareTable;
/** Timer that periodically logs hit/miss statistics. */
private Timer printTimer = new Timer();
/** Shared counters for matching / non-matching waybills. */
private HitCount hitCount = null;
/** Interval in ms between statistics log lines; default one hour. */
private long printRate = (1000 * 60 * 60);

private static final Logger logDQ = LoggerFactory.getLogger(AuditDataCompare.class);

/** Data service bound to the production (bigPro) HBase cluster. */
private DataServiceNew<EXP5WaybillRecord> proDataServiceNew = null;
/** Data service bound to the staging HBase cluster. */
private DataServiceNew<EXP5WaybillRecord> stgDataServiceNew = null;

static String[] bigProRes = {"classpath:DQ/bigPro-hbase.xml", "classpath:DQ/beans.xml"};
static String[] stgRes = {"classpath:DQ/stg-hbase.xml", "classpath:DQ/beans.xml" };

// NOTE(review): the original reused one 'context' field for both Spring
// contexts, dropping the reference to the first one while its beans were
// still in use. Both contexts are now retained in their own fields.
private ClassPathXmlApplicationContext proContext = null;
private ClassPathXmlApplicationContext context = null;

/**
 * Entry point. Loads /DQ/config.properties from the classpath, builds an
 * {@link AuditDataCompare} from it and starts consuming from Kafka.
 *
 * @throws IOException if the configuration resource is missing or unreadable
 */
public static void main(String[] args) throws IOException {
    Properties props = new Properties();
    // try-with-resources: the original never closed this stream, and
    // props.load(null) would have thrown a bare NPE if the resource was absent.
    try (InputStream in = AuditDataCompare.class.getResourceAsStream("/DQ/config.properties")) {
        if (in == null) {
            throw new IOException("classpath resource /DQ/config.properties not found");
        }
        props.load(in);
    }
    long printRate = 0;
    int consumeThreadCount = 0; // original local was misspelled "comsumeThreadCount"
    Object printRateObj = props.get("printRate");
    if (printRateObj != null) {
        printRate = Long.parseLong(printRateObj.toString());
    }
    String clusterName = (String) props.get("clusterName");
    String url = (String) props.get("url");
    String systemIdToken = (String) props.get("systemIdToken");
    String topic = (String) props.get("topic");
    String compareTable = (String) props.get("compareTable");
    Object consumeThreadObj = props.get("consumeThreadCount");
    if (consumeThreadObj != null) {
        consumeThreadCount = Integer.parseInt(consumeThreadObj.toString());
    }
    AuditDataCompare dataCompare = new AuditDataCompare(systemIdToken, topic, url, clusterName,
            compareTable, consumeThreadCount, printRate);
    dataCompare.readFromKafka();
}

/**
 * Builds the comparator, starts the statistics timer and opens both HBase
 * connections.
 *
 * @param systemIdToken      Kafka system token
 * @param topic              topic to consume
 * @param url                Kafka URL
 * @param clusterName        Kafka cluster name
 * @param compareTable       HBase table to compare in both clusters
 * @param consumeThreadCount number of consumer threads
 * @param printRate          statistics logging interval in ms
 */
public AuditDataCompare(String systemIdToken, String topic, String url, String clusterName,
                        String compareTable, int consumeThreadCount, long printRate) {
    this.systemIdToken = systemIdToken;
    this.topic = topic;
    this.url = url;
    this.clusterName = clusterName;
    this.compareTable = compareTable;
    this.consumeThreadCount = consumeThreadCount;
    this.printRate = printRate;
    hitCount = new HitCount(new Date());
    initTimer();
    initHbaseConnection();
}

/**
 * Schedules a fixed-rate task that logs the hit/miss statistics and, when
 * present, the waybill numbers that differed, then clears that list.
 */
public void initTimer() {
    printTimer.schedule(new TimerTask() {
        @Override
        public void run() {
            logDQ.info(hitCount.toString());
            List<String> notSameWaybillNos = hitCount.getNotSameWaybillNos();
            if (CollectionUtils.isNotEmpty(notSameWaybillNos)) {
                logDQ.info(" these are unlike in hbase :{} ", notSameWaybillNos);
                // NOTE(review): misses recorded between the log above and this
                // clear() are dropped from the log; acceptable for statistics.
                hitCount.getNotSameWaybillNos().clear();
            }
        }
    }, 0, printRate);
}

/**
 * Boots one Spring context per HBase cluster and wires a
 * {@link DataServiceNew} against {@code compareTable} in each.
 */
private void initHbaseConnection() {
    // Keep a dedicated reference so the production context is not dropped
    // when the staging context is created (original bug).
    proContext = new ClassPathXmlApplicationContext(bigProRes);
    this.proDataServiceNew = new DataServiceNew<EXP5WaybillRecord>();
    proDataServiceNew.setClazz(EXP5WaybillRecord.class);
    proDataServiceNew.setTableName(compareTable);
    context = new ClassPathXmlApplicationContext(stgRes);
    this.stgDataServiceNew = new DataServiceNew<EXP5WaybillRecord>();
    stgDataServiceNew.setClazz(EXP5WaybillRecord.class);
    stgDataServiceNew.setTableName(compareTable);
}

/**
 * Fetches up to 50 rows for the waybill from each cluster and compares only
 * the row counts (not row contents).
 *
 * @param waybillNo waybill number to look up
 * @return true when both clusters return a non-empty result of equal size
 */
private boolean compare(String waybillNo) {
    String hbaseKey = KeyUtils.getOMSWaybillNoKey(waybillNo) + "_";
    String stopRowKey = hbaseKey + "a";
    List<EXP5WaybillRecord> omsOriginals = proDataServiceNew.find(hbaseKey, true, stopRowKey, true, 50, 0L, 0L);
    List<EXP5WaybillRecord> omsOriginals1 = stgDataServiceNew.find(hbaseKey, true, stopRowKey, true, 50, 0L, 0L);
    if (CollectionUtils.isNotEmpty(omsOriginals) && CollectionUtils.isNotEmpty(omsOriginals1)) {
        if (omsOriginals.size() == omsOriginals1.size()) {
            return true;
        }
    }
    return false;
}

/**
 * Registers a byte-array Kafka listener; each message is parsed as a
 * {@link WaybillInfoDto} and its waybill compared across both clusters.
 */
private void readFromKafka() {
    ConsumeConfig consumeConfig =
            new ConsumeConfig(systemIdToken, url, clusterName, topic, consumeThreadCount);
    try {
        KafkaConsumerRegister.registerByteArrayConsumer(consumeConfig, new IByteArrayMessageConsumeListener() {
            @Override
            public void onMessage(List<byte[]> list) throws KafkaConsumeRetryException {
                for (byte[] bytes : list) {
                    // Local variable: the original kept 'json' in a field
                    // shared by all consumer threads — a data race.
                    // UTF-8 is specified explicitly instead of relying on the
                    // platform default charset.
                    String json = new String(bytes, java.nio.charset.StandardCharsets.UTF_8);
                    WaybillInfoDto dto = JsonUtil.json2Object(json, WaybillInfoDto.class);
                    boolean isSame = compare(dto.getWaybillNo());
                    if (isSame) {
                        hitCount.hit();
                    } else {
                        hitCount.putWaybillNos(dto.getWaybillNo());
                        hitCount.miss();
                    }
                }
            }
        });
    } catch (KafkaException e) {
        throw new RuntimeException(e);
    }
}

public void setSystemIdToken(String systemIdToken) {
    this.systemIdToken = systemIdToken;
}

public void setUrl(String url) {
    this.url = url;
}

public void setClusterName(String clusterName) {
    this.clusterName = clusterName;
}

public void setTopic(String topic) {
    this.topic = topic;
}

public void setConsumeThreadCount(int consumeThreadCount) {
    this.consumeThreadCount = consumeThreadCount;
}

public void setCompareTable(String compareTable) {
    this.compareTable = compareTable;
}

public void setPrintRate(long printRate) {
    this.printRate = printRate;
}

/**
 * Thread-safe hit/miss statistics: counters are atomic and the
 * differing-waybill list is written by Kafka consumer threads and
 * read/cleared by the timer thread.
 */
static class HitCount {
    private AtomicLong same = new AtomicLong();
    private AtomicLong notSame = new AtomicLong();
    // CopyOnWriteArrayList: the original plain ArrayList was mutated from
    // consumer threads while the timer thread iterated/cleared it.
    // Fully qualified because the file's import block is not editable here.
    private List<String> notSameWaybillNos = new java.util.concurrent.CopyOnWriteArrayList<String>();
    private Date startTm;

    public HitCount(Date date) {
        this.startTm = date;
    }

    /** @return fraction of compared waybills that matched; 1.0 before any sample. */
    public double hitRate() {
        long requestCount = same.get() + notSame.get();
        return (requestCount == 0) ? 1.0 : formatRate(same.doubleValue() / requestCount);
    }

    /** Rounds to two decimal places, half-up. */
    private double formatRate(double d) {
        // BigDecimal.valueOf avoids the inexact new BigDecimal(double) constructor.
        return BigDecimal.valueOf(d).setScale(2, RoundingMode.HALF_UP).doubleValue();
    }

    public void hit() {
        same.incrementAndGet();
    }

    public void miss() {
        notSame.incrementAndGet();
    }

    public void putWaybillNos(String waybillNo) {
        if (StringUtils.isNotEmpty(waybillNo)) {
            notSameWaybillNos.add(waybillNo);
        }
    }

    public List<String> getNotSameWaybillNos() {
        return this.notSameWaybillNos;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
                .append("hitCount", same)
                .append("missCount", notSame)
                .append("hitRate", hitRate())
                .append("startDate", DateFormatUtils.format(startTm, "yyyy-MM-dd HH:mm:ss"))
                .toString();
    }
}
Consumes waybill data from Kafka and compares the corresponding records across different HBase clusters.
Latest recommended article published 2022-08-20 21:19:59.