1. Dependencies (pom.xml)
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.7.5</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.beantechs</groupId>
    <artifactId>xmap-chanels</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>xmap-chanels</name>
    <description>Demo project for Spring Boot</description>
    <properties>
        <java.version>1.8</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <!-- HBase dependencies -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>2.0.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>2.0.5</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-mapreduce</artifactId>
            <version>2.0.5</version>
        </dependency>
        <dependency>
            <groupId>redis.clients</groupId>
            <artifactId>jedis</artifactId>
        </dependency>
        <!-- Redis integration (Spring Data Redis) -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-redis</artifactId>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
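One practical note: the HBase 2.x artifacts pull in their own SLF4J/log4j bindings, which can collide with Spring Boot's default Logback. If startup fails with duplicate SLF4J binding errors, excluding the binding is a common fix; a sketch (not required in every environment):
<dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-server</artifactId>
    <version>2.0.5</version>
    <exclusions>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
    </exclusions>
</dependency>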
2. Configuration Files
hbase-site.xml (place it under src/main/resources so that HBaseConfiguration.create() finds it on the classpath):
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://hadoop100:8020/hbase</value>
    </property>
    <!-- run HBase in distributed mode -->
    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>
    <!-- ZooKeeper quorum -->
    <property>
        <name>hbase.zookeeper.quorum</name>
        <value>hadoop100,hadoop102,hadoop103</value>
    </property>
    <!-- ZooKeeper session timeout (ms) -->
    <property>
        <name>zookeeper.session.timeout</name>
        <value>60000</value>
    </property>
</configuration>
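Besides hbase-site.xml, the Kafka listener and StringRedisTemplate need their Spring Boot properties. A minimal application.properties sketch; the broker and Redis hosts are assumptions (the cluster in this post runs on hadoop100, and the commented-out Jedis code connects there):
# Kafka: the listener in the next section uses topic map_test_topic, group map-g1
spring.kafka.bootstrap-servers=hadoop100:9092
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer

# Redis, used by StringRedisTemplate for the per-grid counters
spring.redis.host=hadoop100
spring.redis.port=6379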
3. Code Implementation
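The consumer writes to the HBase table xmap:line_opt with a single column family info, and that table must exist before the first Put. A minimal one-off setup sketch using the HBase 2.x Admin API (the class name CreateLineOptTable is hypothetical; the table and family names come from the consumer below):
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NamespaceExistException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CreateLineOptTable {
    public static void main(String[] args) throws Exception {
        // reads hbase-site.xml from the classpath, exactly like the consumer does
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            try {
                admin.createNamespace(NamespaceDescriptor.create("xmap").build());
            } catch (NamespaceExistException ignored) {
                // namespace already present
            }
            TableName tableName = TableName.valueOf("xmap", "line_opt");
            if (!admin.tableExists(tableName)) {
                admin.createTable(TableDescriptorBuilder.newBuilder(tableName)
                        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("info"))
                        .build());
            }
        }
    }
}
The same can be done from the HBase shell with create_namespace 'xmap' followed by create 'xmap:line_opt', 'info'.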
package com.beantechs.services;

import com.beantechs.utils.DateUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.ValueOperations;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.io.IOException;

@Service
public class KafkaConsumer {

    @Resource
    StringRedisTemplate stringRedisTemplate;

    // Listen on the Kafka topic
    @KafkaListener(topics = "map_test_topic", groupId = "map-g1")
    public void test(ConsumerRecord<Object, String> record) {
        String value = record.value();
        System.out.println(value);
        String[] split = value.split("\t");
        String laneIdx = split[2];
        String type = split[3];
        String tid = split[4];
        String geom = split[7];

        // Time handling: convert createTime (e.g. 29/8/2022 16:02:43) to a 10-digit
        // timestamp, then subtract it from the largest 10-digit number
        String createTime = split[6];
        String replaced = createTime.replace("'", "");
        Long timeStamp = DateUtils.getTimeStamp(replaced);

        // Build the rowKey: grid id, vin and createTime joined by hyphens.
        // The grid id and vin are fixed values in this demo.
        // String gridId = IDUtils.getTrackId(); // e60e85e1
        String gridId = "e60e85e1";
        String rowKey = gridId + "-" + "666666" + "-" + timeStamp;

        // Build the Put
        Put put = new Put(Bytes.toBytes(rowKey));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("tid"), Bytes.toBytes(tid));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("geom"), Bytes.toBytes(geom));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("laneIdx"), Bytes.toBytes(laneIdx));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("type"), Bytes.toBytes(type));

        // Save the row to HBase. HBaseConfiguration.create() picks up hbase-site.xml
        // from the classpath. NOTE: opening a Connection per record is expensive;
        // in production create one Connection at startup and reuse it.
        Configuration hbaseConf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(hbaseConf);
             Table table = conn.getTable(TableName.valueOf("xmap", "line_opt"))) {
            table.put(put); // the original version never called put(), so nothing was written
            System.out.println("data saved to HBase");
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Store the grid id in Redis and count how many rows share the same grid
        ValueOperations<String, String> operations = stringRedisTemplate.opsForValue();
        operations.increment(gridId, 1L);
        System.out.println("rows in the same grid: " + operations.get(gridId));

        // Equivalent with the plain Jedis client:
        // Jedis jedis = new Jedis("hadoop100");
        // jedis.incrBy(gridId, 1L);
        // System.out.println("rows in the same grid: " + jedis.get(gridId));
    }
}
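DateUtils is not shown in the original post; a minimal sketch that matches the comment in the consumer (parse a value like 29/8/2022 16:02:43 to epoch seconds, then subtract it from the largest 10-digit number so newer records sort first under the same rowKey prefix). The date pattern and time zone are assumptions:
package com.beantechs.utils;

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class DateUtils {

    // Pattern assumed from the sample value 29/8/2022 16:02:43
    private static final DateTimeFormatter FORMAT =
            DateTimeFormatter.ofPattern("d/M/yyyy HH:mm:ss");

    /**
     * Convert the text to a 10-digit epoch-seconds timestamp and subtract it from
     * the largest 10-digit number, as described in the consumer's comment. The
     * reversal makes newer rows sort first in HBase's lexicographic key order.
     */
    public static Long getTimeStamp(String text) {
        long epochSeconds = LocalDateTime.parse(text, FORMAT)
                .atZone(ZoneId.systemDefault())
                .toEpochSecond();
        return 9999999999L - epochSeconds;
    }
}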
4. Results