spring boot读取kafka的数据
/**
 * Batch consumer for the "etl_serial" topic (spring.kafka.listener.type=batch,
 * ack-mode=manual_immediate). Each record value is a JSON document whose
 * CollectionName.TABLENAME field routes it to the matching persistence service.
 *
 * A failure on one record must not prevent the rest of the batch from being
 * processed, and the offset is acknowledged manually once the whole batch is done.
 */
@org.springframework.kafka.annotation.KafkaListener(topics = "etl_serial")
public void listen0(List<ConsumerRecord<?, ?>> records, Acknowledgment ack, Consumer<?, ?> consumer){
    for (ConsumerRecord<?, ?> record : records) {
        try {
            // BUG FIX: parse inside the try — previously a single malformed payload
            // threw before ack.acknowledge(), so the whole batch was redelivered forever.
            JSONObject json = JSON.parseObject((String) record.value());
            String tableName = json.getString(CollectionName.TABLENAME);
            switch (tableName) {
                case CollectionName.BILL:
                    onlineOrderbillServiceImpl.doSave(json);
                    break;
                case CollectionName.RBILLPAYDETAIL:
                    orderbillPaydetailServiceImpl.doSave(json);
                    break;
                case CollectionName.UNDBILL:
                    refoundbillServiceImpl.doSave(json);
                    break;
                case CollectionName.CHANNELFUNDS_DYNAMIC:
                    refundChannelfundsServiceImpl.doSave(json);
                    break; // BUG FIX: break was missing — execution fell through to default.
                default:
                    // Unknown table name: skipped on purpose.
                    break;
            }
        } catch (Exception e) {
            // BUG FIX: was an empty catch (logging commented out) — failures vanished silently.
            // NOTE(review): assumes a `logger` field exists on the enclosing class, as the
            // original commented-out line suggests — confirm against the full class.
            logger.error("Failed to process record from etl_serial, value={}", record.value(), e);
        }
    }
    // Manual commit of the batch's offsets (matches ack-mode=manual_immediate).
    ack.acknowledge();
}
配置
spring.kafka.consumer.auto-commit-interval=100
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.bootstrap-servers=<kafka-host>:<port>
# 线程数（并发消费者数）
spring.kafka.listener.concurrency=6
# 是否自动提交偏移量（关闭，改为手动提交）
spring.kafka.consumer.enable-auto-commit=false
# 消费者组ID
spring.kafka.consumer.group-id=test1
spring.kafka.consumer.max-poll-records=200
# 开启批量消费
spring.kafka.listener.type=batch
# ack确认机制（手动立即提交）
spring.kafka.listener.ack-mode=manual_immediate
加载配置文件
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.Set;
/**
 * Loads the Spring profile-specific properties file
 * ({@code application-<active>.properties}, where {@code <active>} is read from
 * {@code spring.profiles.active} in {@code application.properties}) and exposes
 * its values via {@link #getProperty(String)}.
 *
 * <p>Values are re-decoded from ISO-8859-1 to UTF-8 because
 * {@link Properties#load(java.io.InputStream)} always reads the stream as
 * ISO-8859-1, while the file on disk is assumed to be UTF-8 encoded.
 */
public class Property {
    private static final Logger logger = LoggerFactory.getLogger(Property.class);
    private static Properties props;

    static {
        loadProps();
    }

    /**
     * Loads the active profile's properties into {@link #props}.
     * On any failure {@code props} is left as an empty (never null) Properties
     * instance so {@link #getProperty(String)} degrades to returning null.
     */
    private static synchronized void loadProps() {
        logger.info("开始加载properties文件内容.......");
        props = new Properties();
        // BUG FIX: both streams are now closed via try-with-resources — the
        // original never closed the application.properties stream at all.
        try (InputStream ins = Property.class.getClassLoader().getResourceAsStream("application.properties")) {
            // BUG FIX: guard against a missing resource (getResourceAsStream returns null).
            if (ins == null) {
                logger.error("application.properties not found on classpath");
                return;
            }
            Properties propss = new Properties();
            propss.load(ins);
            String active = propss.getProperty("spring.profiles.active");
            String propertiesName = "application-" + active + ".properties";
            try (InputStream in = Property.class.getClassLoader().getResourceAsStream(propertiesName)) {
                if (in == null) {
                    logger.error("{} not found on classpath", propertiesName);
                    return;
                }
                props.load(in);
            }
            // Properties.load(InputStream) decoded the bytes as ISO-8859-1;
            // re-decode each value as UTF-8 (the file's actual encoding).
            for (String key : props.stringPropertyNames()) {
                String newValue = new String(
                        props.getProperty(key).getBytes(StandardCharsets.ISO_8859_1),
                        StandardCharsets.UTF_8);
                props.setProperty(key, newValue);
            }
        } catch (IOException e) {
            // Narrowed from catch(Exception); keep the cause instead of only its text.
            logger.error("加载异常:" + e, e);
        }
        logger.info("加载properties文件内容完成...........");
        logger.info("properties文件内容:" + props);
    }

    /**
     * Returns the value for {@code key} from the loaded profile properties,
     * or {@code null} if the key is absent or loading failed.
     */
    public static String getProperty(String key) {
        if (null == props) {
            // Defensive reload; loadProps() is synchronized so this is safe
            // even if several threads race past the null check.
            loadProps();
        }
        return props.getProperty(key);
    }
}