HBase Java API

Preface

In the previous chapter we looked at how to use HBase through the HBase Shell. In this chapter we pick up where that one left off and implement the same functionality from Java.


Prerequisites

  • HBase
  • Hadoop

Main Content

With the HBase client Java API we can perform the following operations:

  • Initialization

  • Creating a table

  • Deleting a table

  • Inserting/updating data

  • Deleting data

  • Getting a single row

  • Scanning data with Scan

  • Scan combined with three kinds of filters

  • Scan combined with a filter list

  • Initialization

// Configuration, connection and table handles shared by all tests
	static Configuration config = null;
	private Connection connection = null;
	private Table table = null;
	
	@Before
	public void init() throws IOException{
		config = HBaseConfiguration.create();
		config.set("hbase.zookeeper.quorum", "localhost:2181,localhost:2182,localhost:2183");
//		config.set("hbase.zookeeper.property.clientPort", value);
		// Pass the configuration in explicitly, otherwise the quorum set above is ignored
		connection = ConnectionFactory.createConnection(config);
		// Note: HBase stores and returns everything as raw bytes
		table = connection.getTable(TableName.valueOf("user"));
	}
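
The @Before hook opens a Connection and a Table, but nothing here releases them. A minimal teardown sketch, assuming JUnit 4 (matching the @Before/@Test annotations above) and the fields declared in init():

	// Sketch only: close the table and connection after each test (requires org.junit.After)
	@After
	public void cleanup() throws IOException{
		if (table != null) {
			table.close();
		}
		if (connection != null) {
			connection.close();
		}
	}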
  • Creating a table
// DDL: create a table
	@Test
	public void createTable() throws MasterNotRunningException, ZooKeeperConnectionException, IOException{
		// Table administration handle, obtained from the shared connection
		Admin admin = connection.getAdmin();
		// Table descriptor
		TableName tableName = TableName.valueOf("user10029");
		HTableDescriptor desc = new HTableDescriptor(tableName);
		// Column family 1
		HColumnDescriptor family = new HColumnDescriptor("info1");
		desc.addFamily(family);
		// Column family 2
		HColumnDescriptor family2 = new HColumnDescriptor("info2");
		desc.addFamily(family2);
		
		// Create the table
		admin.createTable(desc);
		admin.close();
	}
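
HColumnDescriptor also carries per-family settings such as the number of versions to keep or a TTL. A small sketch of the same create-table flow; the table name user10030 and the values are made up for illustration:

	// Sketch only: create a table with a couple of column-family options
	@Test
	public void createTableWithOptions() throws IOException{
		Admin admin = connection.getAdmin();
		// hypothetical table name, chosen only for this example
		HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("user10030"));
		HColumnDescriptor family = new HColumnDescriptor("info1");
		family.setMaxVersions(3);     // keep up to three versions of each cell
		family.setTimeToLive(86400);  // expire cells after one day (in seconds)
		desc.addFamily(family);
		admin.createTable(desc);
		admin.close();
	}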
  • Deleting a table
@Test
	// Delete a table
	public void deleteTable() throws MasterNotRunningException, ZooKeeperConnectionException, IOException{
		// Table administration handle, obtained from the shared connection
		Admin admin = connection.getAdmin();
		// A table must be disabled before it can be deleted
		admin.disableTable(TableName.valueOf("user10029"));
		admin.deleteTable(TableName.valueOf("user10029"));
		admin.close();
	}
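
Disabling or deleting a table that does not exist fails with an exception. A small guard sketch, assuming the same user10029 table:

	// Sketch only: drop the table only if it is actually there
	@Test
	public void deleteTableIfExists() throws IOException{
		Admin admin = connection.getAdmin();
		TableName tableName = TableName.valueOf("user10029");
		if (admin.tableExists(tableName)) {
			if (admin.isTableEnabled(tableName)) {
				admin.disableTable(tableName);
			}
			admin.deleteTable(tableName);
		}
		admin.close();
	}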
  • Inserting/updating data
@Test
	public void insertData() throws IOException{
		// Build the Put: the constructor argument is the row key,
		// each addColumn call adds one cell (family, qualifier, value).
		// Writing the same row/column again simply stores a new version,
		// which is why insert and update are the same operation.
		Put put = new Put(Bytes.toBytes("wangshifu_10029"));
		put.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("name"), Bytes.toBytes("zhangsan"));
		put.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("age"), Bytes.toBytes("1000"));
		put.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("sex"), Bytes.toBytes("male"));
		table.put(put);
	}
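
Table.put also accepts a list of Puts, which avoids one round trip per row. A minimal batching sketch against the same user table and info1 family; the row keys are made up for illustration:

	// Sketch only: write several rows in a single call (requires java.util.List / ArrayList)
	@Test
	public void insertBatch() throws IOException{
		List<Put> puts = new ArrayList<Put>();
		for (int i = 0; i < 3; i++) {
			// hypothetical row keys for the example
			Put put = new Put(Bytes.toBytes("wangshifu_1003" + i));
			put.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("name"), Bytes.toBytes("user" + i));
			puts.add(put);
		}
		table.put(puts);
	}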
  • Deleting data
@Test
	// Delete data
	public void deleteData() throws IOException{
		// Build the Delete for the target row key
		Delete delete = new Delete(Bytes.toBytes("wangshifu_10029"));
		// Remove a single column of that row
		delete.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("sex"));
		// addFamily(family) would remove every cell in a column family,
		// and a Delete built from just the row key removes the whole row
//		delete.addFamily(family)
		// Execute the delete
		table.delete(delete);
	}
  • Getting a single row
// Query data - fetch a single row
	@Test
	public void queryData() throws IOException{
		try{
			// Build the Get for the target row key
			Get get = new Get(Bytes.toBytes("wangshifu_10029"));
			// Restrict the result to the info1:name column
			get.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("name"));
			
			Result result = table.get(get);
			byte[] name = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
			byte[] age = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("age"));
	
			System.out.println(Bytes.toString(name));
			// age is null here because addColumn above limited the Get to info1:name
//			System.out.println(Bytes.toString(age));
		}catch(Exception e){
			e.printStackTrace();
		}
	}
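
When the column names are not known up front, the cells of a Result can be walked directly. A sketch using Result.rawCells() and CellUtil against the same row as above:

	// Sketch only: print every cell of one row without naming the columns in advance
	// (requires org.apache.hadoop.hbase.Cell and org.apache.hadoop.hbase.CellUtil)
	@Test
	public void queryAllCells() throws IOException{
		Get get = new Get(Bytes.toBytes("wangshifu_10029"));
		Result result = table.get(get);
		for (Cell cell : result.rawCells()) {
			String family = Bytes.toString(CellUtil.cloneFamily(cell));
			String qualifier = Bytes.toString(CellUtil.cloneQualifier(cell));
			String value = Bytes.toString(CellUtil.cloneValue(cell));
			System.out.println(family + ":" + qualifier + " = " + value);
		}
	}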
  • Scanning data with Scan (full table scan - relatively slow)
// Full table scan
	@Test
	public void scanData() throws IOException{
		// Build the Scan
		Scan scan = new Scan();
		// Restrict the scan to a row-key range (row keys sort lexicographically)
		scan.setStartRow(Bytes.toBytes("3"));
//		scan.setStopRow(Bytes.toBytes("100"));
		// Only fetch a single column
		scan.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("name"));
		
		// Run the scan
		ResultScanner scanner = table.getScanner(scan);
		
		for(Result result:scanner){
			byte[] name = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
			byte[] age = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("age"));
			System.out.println(Bytes.toString(name));
			// age prints null because addColumn above restricted the scan to info1:name
			System.out.println(Bytes.toString(age));
		}
	}
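
None of the scan examples here close their ResultScanner, which keeps server-side resources alive longer than necessary. A sketch of the same scan with try-with-resources (assuming Java 7 or later):

	// Sketch only: the scanner is closed automatically when the block exits
	@Test
	public void scanDataClosed() throws IOException{
		Scan scan = new Scan();
		scan.addColumn(Bytes.toBytes("info1"), Bytes.toBytes("name"));
		try (ResultScanner scanner = table.getScanner(scan)) {
			for (Result result : scanner) {
				byte[] name = result.getValue(Bytes.toBytes("info1"), Bytes.toBytes("name"));
				System.out.println(Bytes.toString(name));
			}
		}
	}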
  • Scan combined with three kinds of filters
    • Full table scan with a filter attached
    • Column-value filter - SingleColumnValueFilter
    • Column-name prefix filter - ColumnPrefixFilter (addColumn can achieve something similar; the filter matches columns by name prefix)
    • Multiple column-name prefix filter - MultipleColumnPrefixFilter
    • Row-key filter - RowFilter (regular-expression matching via RegexStringComparator)
    • Combining several filters - FilterList
@Test
	public void scanDataByFilter1() throws IOException {
		// Column-value filter: keep rows whose info1:name equals "zhangsan"
		// Filter filter = new SingleColumnValueFilter(family, qualifier,
		// compareOp, value)
		Filter filter = new SingleColumnValueFilter(Bytes.toBytes("info1"),
				Bytes.toBytes("name"), CompareOp.EQUAL,
				Bytes.toBytes("zhangsan"));
		Scan scan = new Scan();
		scan.setFilter(filter);

		// Run the scan
		ResultScanner scanner = table.getScanner(scan);

		for (Result result : scanner) {
			byte[] name = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("name"));
			byte[] age = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("age"));
			System.out.println(Bytes.toString(name));
			System.out.println(Bytes.toString(age));
		}

	}
	
	
	
	@Test
	public void scanDataByFilter2() throws IOException {
		// Column-name prefix filters
		
		// Keep only columns whose name starts with "name"
		Filter filter = new ColumnPrefixFilter(Bytes.toBytes("name"));
		
		// Several prefixes at once
		byte [][]prefixes = new byte[][]{Bytes.toBytes("name"),Bytes.toBytes("age")};
		Filter filter2 = new MultipleColumnPrefixFilter(prefixes);

		Scan scan = new Scan();
		// Swap in filter2 here to match either prefix
		scan.setFilter(filter);

		// Run the scan
		ResultScanner scanner = table.getScanner(scan);

		for (Result result : scanner) {
			byte[] name = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("name"));
			byte[] age = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("age"));
			System.out.println(Bytes.toString(name));
			System.out.println(Bytes.toString(age));
		}
	}
	
	// Row-key filter
	@Test
	public void scanDataByFilter3() throws IOException {
		// Filter filter = new RowFilter(rowCompareOp, rowComparator);

		// Keep rows whose key matches the regex, i.e. keys starting with "wangshifu"
		Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator("^wangshifu"));

		Scan scan = new Scan();
		scan.setFilter(filter);

		// Run the scan
		ResultScanner scanner = table.getScanner(scan);

		for (Result result : scanner) {
			byte[] name = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("name"));
			byte[] age = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("age"));
			System.out.println(Bytes.toString(name));
			System.out.println(Bytes.toString(age));
		}
	}
	
  • Scan combined with a filter list
@Test
	public void scanDataByFilter4() throws IOException {
		// How the two conditions are combined:
		// MUST_PASS_ONE -> OR
		// MUST_PASS_ALL -> AND
		FilterList filterList = new FilterList(
				FilterList.Operator.MUST_PASS_ONE);
		Filter filter1 = new RowFilter(CompareFilter.CompareOp.EQUAL,
				new RegexStringComparator("^wangshifu"));
		Filter filter2 = new SingleColumnValueFilter(Bytes.toBytes("info1"),
				Bytes.toBytes("name"), CompareOp.EQUAL,
				Bytes.toBytes("zhangsan"));
		filterList.addFilter(filter1);
		filterList.addFilter(filter2);

		// A FilterList is applied to a Scan like any single filter
		Scan scan = new Scan();
		scan.setFilter(filterList);
		ResultScanner scanner = table.getScanner(scan);
		for (Result result : scanner) {
			byte[] name = result.getValue(Bytes.toBytes("info1"),
					Bytes.toBytes("name"));
			System.out.println(Bytes.toString(name));
		}
	}