kafka中的数据传入hbase

文章目录

package my.test.kafka_hbase;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;

/**
 * Pipes records from the Kafka topic "train_1" into the HBase table
 * "events_db:train" (column family "eu").
 *
 * Expected CSV value layout per record:
 * user,event,invited,timestamp,interested,not_interested
 * — presumably produced by an upstream job; TODO confirm against the producer.
 */
public class UserFriendhb {
    public static void main(String[] args) {
        // Kafka consumer configuration.
        Properties prop = new Properties();
        prop.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.226.111:9092");
        prop.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        prop.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000");
        // Auto-commit is off: offsets are committed manually only AFTER a
        // successful HBase write, so a failed write does not lose records.
        prop.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        prop.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        prop.put(ConsumerConfig.GROUP_ID_CONFIG, "aa99");

        // HBase configuration.
        Configuration config = HBaseConfiguration.create();
        config.set("hbase.rootdir", "hdfs://192.168.226.111:9000/hbase");
        config.set("hbase.zookeeper.quorum", "192.168.226.111");
        config.set("hbase.zookeeper.property.clientPort", "2181");

        // try-with-resources closes consumer, connection and table exactly once,
        // even on error. (The original closed the table inside the poll loop,
        // which made every iteration after the first fail.)
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop);
             Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("events_db:train"))) {

            consumer.subscribe(Collections.singletonList("train_1"));

            // Column family and the CSV-field -> qualifier mapping, hoisted out
            // of the loop; the field order matches the CSV layout above.
            byte[] family = Bytes.toBytes("eu");
            String[] qualifiers = {
                    "user", "event", "invited", "timestamp", "interested", "not_interested"
            };

            while (true) {
                // poll blocks up to the timeout and returns whatever records
                // the broker made available in that window.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                // The batch list must be rebuilt for every poll.
                List<Put> putList = new ArrayList<>();
                for (ConsumerRecord<String, String> record : records) {
                    String[] infos = record.value().split(",");
                    // Guard against short/malformed lines instead of crashing
                    // the whole consumer with ArrayIndexOutOfBoundsException.
                    if (infos.length < qualifiers.length) {
                        System.err.println("skipping malformed record: " + record.value());
                        continue;
                    }
                    // Row key derived from (user, event): re-consuming the same
                    // record overwrites the same row, giving idempotent dedup.
                    Put put = new Put(Bytes.toBytes((infos[0] + infos[1]).hashCode()));
                    for (int i = 0; i < qualifiers.length; i++) {
                        // Explicit UTF-8 — String.getBytes() with no charset
                        // uses the platform default encoding.
                        put.addColumn(family, Bytes.toBytes(qualifiers[i]),
                                infos[i].getBytes(StandardCharsets.UTF_8));
                    }
                    putList.add(put);
                }
                if (!putList.isEmpty()) {
                    table.put(putList);
                    // Commit offsets only once the batch is safely in HBase.
                    consumer.commitSync();
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值