写在前面：最好将虚拟机的 IP 地址映射写到 Windows 下的 hosts 文件中，不然连接会非常耗时。具体路径：C:\Windows\System32\drivers\etc\hosts
1.新建maven项目——kafkatohbase
项目架构如下:
2.导入pom.xml的依赖
<properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
    <!-- Single place to bump the Kafka client version. -->
    <kafka.version>2.0.0</kafka.version>
    <!-- HBase version was duplicated across three artifacts; keep it in one property. -->
    <hbase.version>1.2.0</hbase.version>
</properties>
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <!-- 4.13.2: drop-in upgrade from 4.11; 4.x below 4.13.1 is affected by CVE-2020-15250. -->
        <version>4.13.2</version>
        <scope>test</scope>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
    <!-- The _2.11 suffix is the Scala version the broker artifact was built against. -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.11</artifactId>
        <version>${kafka.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>${kafka.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>${hbase.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-common -->
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-common</artifactId>
        <version>${hbase.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-server -->
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-server</artifactId>
        <version>${hbase.version}</version>
    </dependency>
</dependencies>
3.创建读kafka主题的接口——Read.java
package cn.alisa.kafkatohbase.services;

import org.apache.kafka.clients.consumer.ConsumerRecords;

import java.util.Properties;

/**
 * Contract for consuming one batch of records from a Kafka topic.
 */
public interface Read {

    /**
     * Polls the given topic and returns the records fetched.
     *
     * @param prop  consumer configuration (bootstrap servers, group id, deserializers, ...)
     * @param topic name of the Kafka topic to subscribe to
     * @return the batch of String-keyed/String-valued records read from Kafka
     */
    ConsumerRecords<String, String> readKafka(Properties prop, String topic);
}
4.创建写到hbase中的写接口——Write.java
package cn.alisa.kafkatohbase.services;

import org.apache.kafka.clients.consumer.ConsumerRecords;

/**
 * Contract for persisting a batch of Kafka records into HBase.
 */
public interface Write {

    /**
     * Writes every record in the batch to HBase.
     *
     * @param records the String-keyed/String-valued records to persist
     */
    void saveDataToHBase(ConsumerRecords<String, String> records);
}
5.创建实现Read接口的方法——KafkaReadImpl.java
package cn.alisa.kafkatohbase.services.kafkareadimpl;
import cn.alisa.kafkatohbase.services.Read;
import cn.alisa.kafkatohbase.services.Write;
import cn.alisa.kafkatohbase.services.commons.KafkaUtils;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class KafkaReadImpl implements Read {
private Write write;
//构造器
public KafkaReadImpl(Write write){
this.write=write;
}
@Override
public ConsumerRecords<String, String> readKafka(Properties prop,String topic) {
//获得kafka的consumer
KafkaConsumer<String,