Pulling Kafka Data by Partition and Offset

import org.apache.commons.beanutils.PropertyUtils; // assumed source of PropertyUtils.setProperty used below
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Properties;
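// Project-local types used below (RedisService, Req_0200_Hbase, HbaseRow, HbaseCell, Gps) are assumed to be in the same package.
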
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:spring.xml"})
public class ConsumerTest {

    private static final Logger logger = LoggerFactory.getLogger(ConsumerTest.class);
    private static final String TOPIC = "BuB-0200";

    @Autowired
    private RedisService redisService;

    private KafkaConsumer<String, String> consumer;

    @Test
    public void testConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", "node-07.bigdata:6667,node-08.bigdata:6667,node-09.bigdata:6667,node-10.bigdata:6667");
        props.put("group.id", "1");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumer = new KafkaConsumer<>(props);
        try {
            // Target partition 9 of the topic
            TopicPartition topicPartition = new TopicPartition(TOPIC, 9);
            // Manually assign the partition to this consumer (bypasses group rebalancing)
            consumer.assign(Arrays.asList(topicPartition));
            // Seek to the offset we want to start reading from
            consumer.seek(topicPartition, 30518019L);
            while (true) {
                // Poll the next batch of records (a 1 ms timeout busy-loops; 100 ms is friendlier)
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    String value = record.value();
                    long offset = record.offset();
                    int partition = record.partition();
                    // Deserialize the message payload into the HBase row wrapper
                    Req_0200_Hbase req = redisService.deserializeAsObject(value, Req_0200_Hbase.class);
                    HbaseRow hbaseRow = req.getGps();
                    final Gps gpsPo = new Gps();
                    // Copy each HBase cell into the matching Gps property, keyed by qualifier name
                    for (HbaseCell hbaseCell : hbaseRow.getHbaseCellList()) {
                        try {
                            PropertyUtils.setProperty(gpsPo, hbaseCell.getQualifier(), Bytes.toString(hbaseCell.getValue()));
                        } catch (Exception e) {
                            logger.error("Failed to set property, qualifier:{}, value:{}",
                                    hbaseCell.getQualifier(), Bytes.toString(hbaseCell.getValue()), e);
                        }
                    }
                    // Inspect only the records for one SIM
                    if (gpsPo.getSim() == 17000003507L) {
                        // rowKey = vid_timeOffset; the time is stored relative to the epoch base 1483200000000 ms (2017-01-01 00:00 UTC+8)
                        String rowKey = hbaseRow.getRowKey();
                        String[] data = rowKey.split("_");
                        gpsPo.setVid(Long.valueOf(data[0]));
                        gpsPo.setGpsTime(new Date(Long.valueOf(data[1]) + 1483200000000L));
                        logger.info("Received GPS data, partition:{}, offset:{}, sim:{}, rowKey:{}, gpsTime:{}", partition, offset, gpsPo.getSim(), rowKey,
                                new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(gpsPo.getGpsTime()));
                    }
                }
            }
        } finally {
            consumer.close();
        }
    }
}
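
The test above hard-codes offset 30518019 on partition 9. If retention has already deleted that offset (or it has not been produced yet), the next poll after seek() silently falls back to the consumer's auto.offset.reset policy instead of failing loudly. Below is a minimal standalone sketch that first clamps the requested offset into the range the broker still retains, using the Duration-based poll that replaced the deprecated long-based overload in kafka-clients 2.0+. The class name, group id, and timeout are illustrative assumptions, not part of the original test.

import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class SeekByOffsetSketch {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "node-07.bigdata:6667"); // broker from the test above
        props.put("group.id", "offset-probe");                  // hypothetical group id
        props.put("enable.auto.commit", "false");               // don't move any group's committed offsets
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        TopicPartition tp = new TopicPartition("BuB-0200", 9);
        long target = 30518019L;

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.assign(Collections.singletonList(tp));

            // Clamp the requested offset into [earliest retained, log end],
            // so seek() never points at a deleted or not-yet-written offset.
            Map<TopicPartition, Long> begin = consumer.beginningOffsets(Collections.singletonList(tp));
            Map<TopicPartition, Long> end = consumer.endOffsets(Collections.singletonList(tp));
            long safe = Math.max(begin.get(tp), Math.min(target, end.get(tp)));
            consumer.seek(tp, safe);

            // Duration-based poll, available since kafka-clients 2.0
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("partition=%d offset=%d value=%s%n",
                        record.partition(), record.offset(), record.value());
            }
        }
    }
}

Setting enable.auto.commit to false keeps a probe like this from disturbing the committed offsets of any real consumer group that happens to share the same group.id.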