HBase 0.98.6.1 Basic JAR Dependencies

Because HBase is usually used together with Hadoop and Tomcat, blindly importing every JAR under /lib causes class conflicts, to the point that even the simplest JSP page will no longer load. I spent today going through the JARs one by one, and it was a painful exercise.

Without further ado, here is the list of JARs:

hbase-client-0.98.6.1-hadoop2.jar
hbase-common-0.98.6.1-hadoop2.jar
hbase-common-0.98.6.1-hadoop2-tests.jar
hbase-examples-0.98.6.1-hadoop2.jar
hbase-hadoop2-compat-0.98.6.1-hadoop2.jar
hbase-hadoop-compat-0.98.6.1-hadoop2.jar
hbase-it-0.98.6.1-hadoop2.jar
hbase-prefix-tree-0.98.6.1-hadoop2.jar
hbase-protocol-0.98.6.1-hadoop2.jar
hbase-server-0.98.6.1-hadoop2.jar
hbase-server-0.98.6.1-hadoop2-tests.jar
hbase-shell-0.98.6.1-hadoop2.jar
hbase-testing-util-0.98.6.1-hadoop2.jar
hbase-thrift-0.98.6.1-hadoop2.jar
hadoop-common-2.2.0.jar
protobuf-java-2.5.0.jar
commons-logging-1.1.1.jar
log4j-1.2.17.jar
guava-12.0.1.jar
zookeeper-3.4.6.jar
commons-lang-2.6.jar
commons-configuration-1.6.jar
hadoop-auth-2.2.0.jar
slf4j-log4j12-1.6.4.jar
slf4j-api-1.6.4.jar
htrace-core-2.04.jar
netty-3.6.6.Final.jar
commons-codec-1.7.jar

Importing these JARs is basically enough for the common operations. As a bonus, here is some code.
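
Before wiring these into a web project, a quick sanity check is to print the client library versions from a tiny main class. This is only a minimal sketch (the class name ClasspathCheck is a placeholder of mine, not part of the project): if it compiles and prints, the HBase and Hadoop client JARs listed above are visible on the classpath.

import org.apache.hadoop.hbase.util.VersionInfo;

public class ClasspathCheck {
	// If these classes resolve and the versions print, the JARs listed above are on the classpath.
	public static void main(String[] args) {
		System.out.println("HBase client version: " + VersionInfo.getVersion());
		System.out.println("Hadoop version: " + org.apache.hadoop.util.VersionInfo.getVersion());
	}
}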


Below is a utility class; it mainly sets up the Configuration and an HBaseAdmin instance.

package com.skywo.util;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class HbaseUtil {
				private Configuration conf;
				private HBaseAdmin admin;
				
				public HbaseUtil(){
					// Create the Configuration
					conf = HBaseConfiguration.create();
					// These are the default values; if you have not changed the HBase configuration you can use them as-is
					conf.set("hbase.zookeeper.property.clientPort", "2181");
					conf.set("hbase.zookeeper.quorum", "localhost");
				}
				
				public void createAdmin(){//Create HBaseAdmin
					try {
						admin = new HBaseAdmin(conf);
					} catch (MasterNotRunningException e) {
						e.printStackTrace();
					} catch (ZooKeeperConnectionException e) {
						e.printStackTrace();
					} catch (IOException e) {
						e.printStackTrace();
					}
				}
				
				public Configuration getConfiguration(){
					return conf;
				}
				
				public HBaseAdmin getHBaseAdmin(){
					if(admin == null){
						createAdmin();
					}
					return admin;
				}
				
				public void close(){
					try {
						admin.close();
					} catch (IOException e) {
						e.printStackTrace();
					}
				}
}
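
For reference, here is a minimal usage sketch of HbaseUtil, assuming the defaults above (ZooKeeper on localhost:2181). The class name HbaseUtilDemo and the table name "skywo" are just placeholders: create the util, grab the admin, do a simple check, then close it.

import java.io.IOException;

import org.apache.hadoop.hbase.client.HBaseAdmin;

import com.skywo.util.HbaseUtil;

public class HbaseUtilDemo {
	public static void main(String[] args) {
		// Assumes the default settings above: ZooKeeper on localhost:2181
		HbaseUtil util = new HbaseUtil();
		HBaseAdmin admin = util.getHBaseAdmin();
		try {
			// "skywo" is only an example table name
			System.out.println("Table exists: " + admin.tableExists("skywo"));
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			util.close();
		}
	}
}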

And here is a test class:

package com.skywo.hbasetest;


import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;

import com.skywo.util.HbaseUtil;

public class HbaseTest {
			private HbaseUtil util;
			Configuration configuration;
			HBaseAdmin admin;
			
			public HbaseTest(){
				util = new HbaseUtil();
				
				configuration = util.getConfiguration();				
				admin = util.getHBaseAdmin();
			}
			
			public static void main(String args[]){
				HbaseTest test = new HbaseTest();
				test.run();
			}
			
			public void run(){
				//this.createTable("skywo");

				//System.out.println("QueryAll......");
				//this.queryAll("skywo");
				//System.out.println();

				//System.out.println("QueryByRowKey.......");
				//this.queryByRowKey("skywo", "1416826336607");
				//System.out.println();

				//System.out.println("DeleteByRowKey......");
				//this.deleteByRowkey("skywo", "1416498385195");

				//System.out.println("QueryAll......");
				//this.queryAll("skywo");
				//System.out.println();

				//System.out.println("QueryBySingleColumn.......");
				//this.queryBySingleColumn("skywo", "c1", "scarecrow");
				//System.out.println();

				//String[] columnNames = {"c1", "c2"};
				//String[] columnValues = {"scarecrow", "bbb"};
				//System.out.println("QueryByMultipleColumn.......");
				//this.queryByMultipleColumn("skywo", columnNames, columnValues);
				//System.out.println();

				//long currentTime = System.currentTimeMillis();
				//String time = String.valueOf(currentTime);
				//Put put = new Put(time.getBytes());
				//System.out.println("Input......");
				//put.add("c1".getBytes(), null, "scarecrow".getBytes());
				//put.add("c2".getBytes(), null, "老板".getBytes());
				//put.add("c3".getBytes(), null, "钟".getBytes());
				//this.insertData("skywo", put);

				System.out.println("QueryAll......");
				this.queryAll("newproduct");
				System.out.println();

				//int num = 2;
				//System.out.println("QueryByPageNumber.................");
				//this.queryByPageNumber("skywo", num);
			}
			
			public void createTable(String tableName){
				System.out.println("start create table ......");
				
				try {
					if(admin.tableExists(tableName)){
						System.out.println(tableName + " exists, deleting......");
						admin.disableTable(tableName);
						admin.deleteTable(tableName);
					}
					
					HTableDescriptor descriptor = new HTableDescriptor(tableName);
					descriptor.addFamily(new HColumnDescriptor("c1"));
					descriptor.addFamily(new HColumnDescriptor("c2"));
					descriptor.addFamily(new HColumnDescriptor("c3"));
					
					admin.createTable(descriptor);
				} catch (IOException e) {
					e.printStackTrace();
				}finally{
					util.close();
				}
			}
			
			public void dropTable(String tableName){
				try {
					if(admin.tableExists(tableName)){
						admin.disableTable(tableName);
						admin.deleteTable(tableName);
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			
			@SuppressWarnings("deprecation")
			public void insertData(String tableName, Put put){
				
				System.out.println("start insert data ......");

				try {
					HTable table = new HTable(configuration, tableName);
					table.put(put);
					table.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
				System.out.println("end insert data ......"); 
			}
			
			
			
			public void queryAll(String tableName){
				try {
					HTable table = new HTable(configuration, tableName);
					ResultScanner rs = table.getScanner(new Scan());
					for(Result r : rs){
						System.out.println("Rowkey: " + new String(r.getRow()));
						for(KeyValue keyValue : r.raw()){
							System.out.println("列: " + new String(keyValue.getFamily()) + "         值: " + new String(keyValue.getValue()));
						}
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			
			// Query by row key
			public void queryByRowKey(String tableName, String rowkey){
				try {
					HTable table = new HTable(configuration, tableName);
					Get scan = new Get(rowkey.getBytes());
					Result r = table.get(scan);
					System.out.println("Rowkey: " + new String(r.getRow()));
					for(KeyValue keyValue : r.raw()){
						System.out.println("列: " + new String(keyValue.getFamily()) + "         值: " + new String(keyValue.getValue()));
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			
			// Query rows where the given column (family) has the given value
			public void queryBySingleColumn(String tableName, String columnName, String columnValue){
				try {
					HTable table = new HTable(configuration, tableName);
					Filter filter = new SingleColumnValueFilter(columnName.getBytes(), null, CompareOp.EQUAL, columnValue.getBytes());
					Scan s = new Scan();
					s.setFilter(filter);
					ResultScanner rs = table.getScanner(s);
					for(Result r : rs){
						System.out.println("Rowkey: " + new String(r.getRow()));
						for(KeyValue keyValue : r.raw()){
							System.out.println("列: " + new String(keyValue.getFamily()) + "         值: " + new String(keyValue.getValue()));
						}
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			
			// Query by the values of multiple columns (multi-condition query)
			public void queryByMultipleColumn(String tableName, String[] columnNames, String [] columnValues){
				
				List<Filter> filters = new ArrayList<Filter>();
				
				try {
					HTable table = new HTable(configuration, tableName);
					
					int len = columnNames.length;
					for(int i = 0 ; i < len ; i++){
						Filter filter = new SingleColumnValueFilter(columnNames[i].getBytes(), null, CompareOp.EQUAL, columnValues[i].getBytes());
						filters.add(filter);
					}
					FilterList filterList = new FilterList(filters);
					
					Scan s = new Scan();
					s.setFilter(filterList);
					ResultScanner rs = table.getScanner(s);
					for(Result r : rs){
						System.out.println("Rowkey: " + new String(r.getRow()));
						for(KeyValue keyValue : r.raw()){
							System.out.println("列: " + new String(keyValue.getFamily()) + "         值: " + new String(keyValue.getValue()));
						}
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			
			// Return at most n records (simple paging with PageFilter)
			public void queryByPageNumber(String tableName, int number){
				try {
					HTable table = new HTable(configuration, tableName);
					Filter filter = new PageFilter(number);
					Scan s = new Scan();
					s.setFilter(filter);
					ResultScanner rs = table.getScanner(s);
					for(Result r : rs){
						System.out.println("Rowkey: " + new String(r.getRow()));
						for(KeyValue keyValue : r.raw()){
							System.out.println("列: " + new String(keyValue.getFamily()) + "         值: " + new String(keyValue.getValue()));
						}
					}
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
			
			public void deleteByRowkey(String tableName, String rowKey){
				try {
					HTable table = new HTable(configuration, tableName);
					List<Delete> list = new ArrayList<Delete>();
					Delete d1 = new Delete(rowKey.getBytes());
					list.add(d1);
					table.delete(list);
					
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
}
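
The filters above can also be combined. As a sketch (not part of the original class; the method name is only illustrative), the following method could be added to HbaseTest. It returns at most number rows whose column family columnName has the value columnValue, using the same SingleColumnValueFilter, PageFilter, and FilterList classes already imported above:

			// Sketch: combine a value filter with a page filter; MUST_PASS_ALL means a row
			// is returned only if it satisfies every filter in the list.
			public void queryByColumnWithLimit(String tableName, String columnName, String columnValue, int number){
				try {
					HTable table = new HTable(configuration, tableName);

					List<Filter> filters = new ArrayList<Filter>();
					filters.add(new SingleColumnValueFilter(columnName.getBytes(), null, CompareOp.EQUAL, columnValue.getBytes()));
					filters.add(new PageFilter(number));
					FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL, filters);

					Scan s = new Scan();
					s.setFilter(filterList);
					ResultScanner rs = table.getScanner(s);
					for(Result r : rs){
						System.out.println("Rowkey: " + new String(r.getRow()));
						for(KeyValue keyValue : r.raw()){
							System.out.println("Column: " + new String(keyValue.getFamily()) + "         Value: " + new String(keyValue.getValue()));
						}
					}
					rs.close();
					table.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}

Note that PageFilter is applied independently on each region server, so with more than one region you may receive slightly more than number rows; stop reading on the client side once you have enough.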


Tomorrow I will post the basic Hadoop JARs. It is late and I am tired, so apologies for not adding more detailed comments; if anything is unclear, feel free to discuss it with me.
