package com.git.kafka.producer;

import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ExecutionException;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFuture;

import com.alibaba.fastjson.JSON;
import com.git.kafka.constant.KafkaMesConstant;

/**
 * Kafka producer template.
 * <p>
 * Serializes a payload to JSON and sends it through the injected
 * {@link KafkaTemplate}, optionally targeting an explicit partition derived
 * from the message key. The result of the (blocking) send is converted into a
 * {@code code}/{@code message} map built from {@link KafkaMesConstant}.
 *
 * @author wangb
 */
@Component
public class KafkaProducerServer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends a message to Kafka.
     *
     * @param topic        target topic
     * @param value        message payload; serialized with fastjson, and its
     *                     {@code hashCode()} contributes to the record key
     * @param ifPartition  "0" to pick a partition explicitly from the key,
     *                     anything else (including null) to let Kafka decide
     * @param partitionNum number of partitions; must be &gt; 0 when
     *                     {@code ifPartition} is "0"
     * @param role         caller role (e.g. bbc, app, erp), used as key prefix
     * @return result map with keys "code" and "message" (see {@link KafkaMesConstant})
     */
    public Map<String, Object> sndMesForTemplate(String topic, Object value, String ifPartition,
            Integer partitionNum, String role) {
        String key = role + "-" + value.hashCode();
        String valueString = JSON.toJSONString(value);
        // Constant-first equals avoids an NPE when ifPartition is null;
        // a null flag falls through to the key-based (Kafka-chosen) path.
        if ("0".equals(ifPartition)) {
            // Explicit partitioning: derive a stable partition index from the key.
            int partitionIndex = getPartitionIndex(key, partitionNum);
            ListenableFuture<SendResult<String, String>> result =
                    kafkaTemplate.send(topic, partitionIndex, key, valueString);
            return checkProRecord(result);
        } else {
            // Let Kafka pick the partition from the record key.
            ListenableFuture<SendResult<String, String>> result =
                    kafkaTemplate.send(topic, key, valueString);
            return checkProRecord(result);
        }
    }

    /**
     * Maps a key to a partition index in {@code [0, partitionNum)}.
     * A null key gets a random partition.
     *
     * @param key          record key, may be null
     * @param partitionNum partition count, must be &gt; 0
     * @return partition index in {@code [0, partitionNum)}
     */
    private int getPartitionIndex(String key, int partitionNum) {
        if (key == null) {
            Random random = new Random();
            return random.nextInt(partitionNum);
        }
        // abs() is applied AFTER the modulo: Math.abs(Integer.MIN_VALUE) is
        // still negative, so abs(hash) % n could return a negative index.
        // abs(hash % n) is identical for every other hash and always valid.
        return Math.abs(key.hashCode() % partitionNum);
    }

    /**
     * Blocks on the send future and converts its outcome into a
     * {@code code}/{@code message} result map.
     * <p>
     * Only the record metadata offset is inspected; the producer record
     * itself is not checked.
     *
     * @param res send future, may be null
     * @return result map with keys "code" and "message"
     */
    private Map<String, Object> checkProRecord(ListenableFuture<SendResult<String, String>> res) {
        Map<String, Object> m = new HashMap<>();
        if (res == null) {
            m.put("code", KafkaMesConstant.KAFKA_NO_RESULT_CODE);
            m.put("message", KafkaMesConstant.KAFKA_NO_RESULT_MES);
            return m;
        }
        try {
            SendResult<String, String> r = res.get();
            // offset() returns a primitive long and is never null;
            // a negative value means the broker reported no offset.
            long offsetIndex = r.getRecordMetadata().offset();
            if (offsetIndex >= 0) {
                m.put("code", KafkaMesConstant.SUCCESS_CODE);
                m.put("message", KafkaMesConstant.SUCCESS_MES);
            } else {
                m.put("code", KafkaMesConstant.KAFKA_NO_OFFSET_CODE);
                m.put("message", KafkaMesConstant.KAFKA_NO_OFFSET_MES);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe cancellation.
            Thread.currentThread().interrupt();
            e.printStackTrace(); // TODO(review): replace with a proper logger
            m.put("code", KafkaMesConstant.KAFKA_SEND_ERROR_CODE);
            m.put("message", KafkaMesConstant.KAFKA_SEND_ERROR_MES);
        } catch (ExecutionException e) {
            e.printStackTrace(); // TODO(review): replace with a proper logger
            m.put("code", KafkaMesConstant.KAFKA_SEND_ERROR_CODE);
            m.put("message", KafkaMesConstant.KAFKA_SEND_ERROR_MES);
        }
        return m;
    }
}