HBase: Implementing CRUD Operations with the Java API


We test the program locally on Windows with MyEclipse; for the environment setup tutorial, see

https://blog.csdn.net/weixin_41407399/article/details/79763235

Add the HBase client and server dependencies to pom.xml:

 

<dependency>
	<groupId>org.apache.hbase</groupId>
	<artifactId>hbase-server</artifactId>
	<version>0.98.6-hadoop2</version>
</dependency>
<dependency>
	<groupId>org.apache.hbase</groupId>
	<artifactId>hbase-client</artifactId>
	<version>0.98.6-hadoop2</version>
</dependency>

Create a resource folder (a source folder on the classpath) and put hbase-site.xml and log4j.properties in it, so the client can find your cluster configuration.
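A minimal hbase-site.xml only needs to tell the client where ZooKeeper runs. The sketch below assumes a quorum host named master (the same value commented out in createTable further down); adjust it to your cluster:

<configuration>
	<property>
		<name>hbase.zookeeper.quorum</name>
		<value>master</value>
	</property>
	<property>
		<name>hbase.zookeeper.property.clientPort</name>
		<value>2181</value>
	</property>
</configuration>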

Create a namespace in HBase for testing (the code below uses test:testApi, i.e. table testApi inside namespace test).
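In the HBase shell this is create_namespace 'test'. It can also be done from Java; here is a minimal sketch against the same 0.98 admin API the rest of the post uses (the class name CreateNamespace is just for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateNamespace {
	public static void main(String[] args) throws Exception {
		// Reads hbase-site.xml from the classpath, like the rest of the code
		Configuration conf = HBaseConfiguration.create();
		HBaseAdmin admin = new HBaseAdmin(conf);
		// Equivalent to `create_namespace 'test'` in the HBase shell
		admin.createNamespace(NamespaceDescriptor.create("test").build());
		admin.close();
	}
}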

The Java code follows.

The utility class:

package com.hadoop.hbaseTest.util;

/**
 * Utility class wrapping HTable creation and cleanup.
 */
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;

public class HbaseUtil {
	// Get an HTable handle for the given table
	public static HTable getHTable(String tableName) {
		// Create a Configuration; it picks up hbase-site.xml from the classpath
		Configuration conf = HBaseConfiguration.create();
		HTable htbl = null;
		try {
			htbl = new HTable(conf, tableName);
		} catch (IOException e) {
			e.printStackTrace();
		}
		return htbl;
	}

	// Close the table handle and release its resources
	public static void closeHtable(HTable htbl) {
		try {
			htbl.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

}

 

The DAO interface:

 

package com.hadoop.hbaseTest.dao;

/**
 * DAO interface.
 * @author Administrator
 */
public interface OperationDao {

	// Insert a row
	public void putData(String rowKey);

	// Delete a cell from a row
	public void deleteData(String rowKey);

	// Update a row
	public void updateData(String rowKey);

	// Read a row
	public void getData(String rowKey);

	// Bulk-import rows
	public void putAllData(String rowKey);

	// Scan a range of rows
	public void scanData(String startRow, String stopRow);

	// Create a table
	public void createTable(String tableName);

	// Delete a table
	public void deleteTable(String tableName);

}

The DAO implementation class:

package com.hadoop.hbaseTest.dao.daoImpl;
/**
 * DAO implementation.
 */
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.zookeeper.common.IOUtils;
import com.hadoop.hbaseTest.dao.OperationDao;
import com.hadoop.hbaseTest.util.HbaseUtil;

public class OperationDaoImpl implements OperationDao {

	@Override
	public void putData(String rowKey) {
		try {
			HTable htbl = HbaseUtil.getHTable("test:testApi");
			// Build a Put for the given row key
			Put put = new Put(Bytes.toBytes(rowKey));
			// Write the cell info:num = "hadoop1"
			put.add(Bytes.toBytes("info"), Bytes.toBytes("num"), Bytes.toBytes("hadoop" + 1));
			htbl.put(put);
			HbaseUtil.closeHtable(htbl);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void deleteData(String rowKey) {
		try {
			HTable htbl = HbaseUtil.getHTable("test:testApi");
			// Build a Delete for the given row key
			Delete del = new Delete(Bytes.toBytes(rowKey));
			// deleteColumn removes only the most recent version of info:num;
			// run it once per stored version (see the test results below)
			del.deleteColumn(Bytes.toBytes("info"), Bytes.toBytes("num"));
			htbl.delete(del);
			HbaseUtil.closeHtable(htbl);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void updateData(String rowKey) {
		try {
			HTable htbl = HbaseUtil.getHTable("test:testApi");
			// An update in HBase is just another Put on the same cell;
			// it stores a new version with a newer timestamp
			Put put = new Put(Bytes.toBytes(rowKey));
			put.add(Bytes.toBytes("info"), Bytes.toBytes("num"), Bytes.toBytes("hadoop" + 3));
			htbl.put(put);
			HbaseUtil.closeHtable(htbl);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void getData(String rowKey) {
		try {
			HTable htbl = HbaseUtil.getHTable("test:testApi");
			// Build a Get for the given row key
			Get get = new Get(Bytes.toBytes(rowKey));
			// Restrict the Get to the column info:num
			get.addColumn(Bytes.toBytes("info"), Bytes.toBytes("num"));
			// Fetch the result
			Result rs = htbl.get(get);
			Cell[] cells = rs.rawCells();
			// Print each cell as family:qualifier->value
			for (Cell cell : cells) {
				System.out.print(Bytes.toString(CellUtil.cloneFamily(cell)) + ":");
				System.out.print(Bytes.toString(CellUtil.cloneQualifier(cell)) + "->");
				System.out.print(Bytes.toString(CellUtil.cloneValue(cell)));
			}
			System.out.println();
			HbaseUtil.closeHtable(htbl);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void putAllData(String rowKey) {
		try {
			HTable htbl = HbaseUtil.getHTable("test:testApi");
			List<Put> list = new ArrayList<Put>(100000);
			// Generate one million rows: rk1, rk2, ...
			for (long i = 1; i <= 1000000; i++) {
				Put put = new Put(Bytes.toBytes(rowKey + i));
				put.add(Bytes.toBytes("info"), Bytes.toBytes("num"), Bytes.toBytes("hadoop" + i));
				list.add(put);
				// Flush a batch to HBase every 100,000 Puts
				if (i % 100000 == 0) {
					htbl.put(list);
					list = new ArrayList<Put>(100000);
				}
			}
			// If the total is not a multiple of 100,000, flush the remainder after the loop
			htbl.put(list);
			HbaseUtil.closeHtable(htbl);
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void scanData(String startRow, String stopRow) {
		HTable htbl = null;
		ResultScanner rss = null;
		try {
			htbl = HbaseUtil.getHTable("test:testApi");
			// Scan the row range [startRow, stopRow)
			Scan scan = new Scan(Bytes.toBytes(startRow), Bytes.toBytes(stopRow));
			// Restrict the scan to the column info:num
			scan.addColumn(Bytes.toBytes("info"), Bytes.toBytes("num"));
			// Open the scanner
			rss = htbl.getScanner(scan);
			// Iterate over the matching rows
			for (Result rs : rss) {
				System.out.print(Bytes.toString(rs.getRow()) + "\t");
				// Print each cell of the row
				Cell[] cells = rs.rawCells();
				for (Cell cell : cells) {
					System.out.print(Bytes.toString(CellUtil.cloneFamily(cell)) + ":");
					System.out.print(Bytes.toString(CellUtil.cloneQualifier(cell)) + "->");
					System.out.print(Bytes.toString(CellUtil.cloneValue(cell)));
				}
				System.out.println();
			}
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			// closeStream tolerates null, so this is safe even if the scan failed early
			IOUtils.closeStream(rss);
			IOUtils.closeStream(htbl);
		}
	}

	@Override
	public void createTable(String tableName) {
		Configuration conf = HBaseConfiguration.create();
		//conf.set("hbase.zookeeper.quorum", "master");
		try {
			HBaseAdmin hba = new HBaseAdmin(conf);
			HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
			// Add a single column family named "info"
			HColumnDescriptor info = new HColumnDescriptor("info");
			// Attach a custom key/value attribute to the column family metadata
			info.setValue("num", "003");
			htd.addFamily(info);
			hba.createTable(htd);
			hba.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	@Override
	public void deleteTable(String tableName) {
		Configuration conf = HBaseConfiguration.create();
		try {
			HBaseAdmin hba = new HBaseAdmin(conf);
			// A table must be disabled before it can be deleted
			hba.disableTable(tableName);
			hba.deleteTable(tableName);
			hba.close();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
}

 

The test class:

 

package com.hadoop.hbaseTest.service;
/**
 * Test driver: exercises each DAO method in turn.
 */

import com.hadoop.hbaseTest.dao.daoImpl.OperationDaoImpl;

public class Test {
	public static void main(String[] args) {
		OperationDaoImpl od = new OperationDaoImpl();
		String tableName = "test:testApi";
		String rowKey = "rk";
		od.createTable(tableName);    // create test:testApi with family "info"
		od.putData(rowKey);           // write info:num = "hadoop1"
		od.getData(rowKey);           // prints info:num->hadoop1
		od.updateData(rowKey);        // write a newer version, "hadoop3"
		od.deleteData(rowKey);        // delete the newest version of the cell
		od.putAllData(rowKey);        // bulk-import rows rk1..rk1000000
		od.scanData("rk1", "rk1200"); // scan the range [rk1, rk1200)
		od.deleteTable(tableName);    // disable and drop the table
	}
}

The test results follow.

Creating the table

After running, the table is created successfully.

Inserting data

The insert succeeds.

Querying data

The query succeeds; the console prints info:num->hadoop1.

Updating data

The update succeeds: hadoop1 becomes hadoop3 (a newer version of the same cell).

Deleting data

Running deleteData once removes the version with the most recent timestamp, so the value falls back to hadoop1.

Running it again removes the earlier version as well; the delete succeeds and the cell is empty.
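This two-step behavior comes from Delete.deleteColumn, which marks only the newest version of the cell. If a single call should remove every version, the 0.98 client also offers Delete.deleteColumns (plural); a minimal sketch of that alternative delete body:

			Delete del = new Delete(Bytes.toBytes(rowKey));
			// deleteColumns (plural) marks ALL versions of info:num for deletion,
			// so one run empties the cell no matter how many versions exist
			del.deleteColumns(Bytes.toBytes("info"), Bytes.toBytes("num"));
			htbl.delete(del);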

Bulk-importing data

After running, querying the table in HBase confirms the inserts succeeded.

Scanning the table

The scan succeeds. Note that startRow is inclusive, stopRow is exclusive, and row keys compare lexicographically, so scanning from rk1 to rk1200 also returns rows such as rk10, rk100, and rk10000.

Deleting the table

After running, the table is deleted successfully.

