请先创建一张表,命令如下:
create 'testtable','colfam1'
然后再执行下面程序(在eclipse上执行就直接是run as java application):
1、向表中加入数据:
package hbaseTest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class PutTest
{
    /**
     * Demonstrates the HBase write paths: a single Put with several columns,
     * adding a cell through an explicit KeyValue, a batched List of Puts,
     * client-side write buffering via setAutoFlush(false)/flushCommits(),
     * and an atomic checkAndPut.
     *
     * @param args unused
     * @throws IOException on any communication failure with the cluster
     */
    public static void main(String[] args) throws IOException
    {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try
        {
            // Single put carrying two columns of the same family.
            Put put1 = new Put(Bytes.toBytes("row1"));
            put1.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val1"));
            put1.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col2"), Bytes.toBytes("val2"));
            // A cell can also be added as an explicit KeyValue; its row key
            // must match the Put's row or the client rejects it.
            KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("colfam1"),
                    Bytes.toBytes("col5"), Bytes.toBytes("val1"));
            put1.add(kv);
            table.put(put1);

            // List-based puts: sent to the server in one round trip.
            List<Put> puts = new ArrayList<Put>();
            Put put2 = new Put(Bytes.toBytes("row2"));
            put2.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val1"));
            Put put3 = new Put(Bytes.toBytes("row2"));
            put3.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col2"), Bytes.toBytes("val2"));
            puts.add(put2);
            puts.add(put3);
            table.put(puts);

            // Buffered output: disable auto-flush so puts accumulate in the
            // client-side write buffer, then push them out explicitly.
            table.setAutoFlush(false);
            Put put4 = new Put(Bytes.toBytes("row3"));
            put4.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col3"), Bytes.toBytes("val5"));
            table.put(put4);
            Put put5 = new Put(Bytes.toBytes("row3"));
            put5.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col4"), Bytes.toBytes("val6"));
            table.put(put5);
            table.flushCommits();
            table.setAutoFlush(true);

            // Atomic check-and-put: a null expected value means "apply only if
            // colfam1:col1 does not exist". Since col1 was written above, this
            // check fails and result is false.
            boolean result = table.checkAndPut(Bytes.toBytes("row1"), Bytes.toBytes("colfam1"),
                    Bytes.toBytes("col1"), null, put1);
            System.out.println(result);
        }
        finally
        {
            // Original leaked the table; always release client resources.
            table.close();
        }
    }
}
2、从表中获取值:
package hbaseTest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
public class GetTest
{
    /**
     * Demonstrates the HBase read paths: a single-row Get, a batched List of
     * Gets, a Get narrowed to one cell with addColumn, and the (deprecated)
     * getRowOrBefore closest-row lookup.
     *
     * @param args unused
     * @throws IOException on any communication failure with the cluster
     */
    public static void main(String[] args) throws IOException
    {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try
        {
            // Read a single row and dump every cell it contains.
            Get get1 = new Get(Bytes.toBytes("row1"));
            Result result1 = table.get(get1);
            System.out.println(Bytes.toString(result1.getRow()) + " contains "
                    + result1.size() + "KeyValue:");
            for (KeyValue kv : result1.raw())
            {
                System.out.println(Bytes.toString(kv.getFamily()) + " "
                        + Bytes.toString(kv.getQualifier()) + " "
                        + Bytes.toString(kv.getValue()));
            }

            // Batched read: one Result per Get, in request order.
            List<Get> gets = new ArrayList<Get>();
            Get get2 = new Get(Bytes.toBytes("row2"));
            Get get3 = new Get(Bytes.toBytes("row3"));
            gets.add(get2);
            gets.add(get3);
            Result[] results = table.get(gets);
            for (Result result : results)
            {
                for (KeyValue kv : result.raw())
                {
                    System.out.println(Bytes.toString(kv.getFamily()) + " "
                            + Bytes.toString(kv.getQualifier()) + " "
                            + Bytes.toString(kv.getValue()));
                }
            }

            // Fetch only a single cell by restricting the Get to one column.
            Get get4 = new Get(Bytes.toBytes("row3"));
            get4.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col3"));
            Result result2 = table.get(get4);
            System.out.println(result2.toString());

            // Closest-row lookup: returns the requested row, or the row
            // immediately before it if "row4" does not exist.
            Result result3 = table.getRowOrBefore(Bytes.toBytes("row4"), Bytes.toBytes("colfam1"));
            for (KeyValue kv : result3.raw())
            {
                System.out.println(Bytes.toString(kv.getFamily()) + " "
                        + Bytes.toString(kv.getQualifier()) + " "
                        + Bytes.toString(kv.getValue()));
            }
        }
        finally
        {
            // Original leaked the table; always release client resources.
            table.close();
        }
    }
}
3、删除表中的值:
package hbaseTest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
public class DeleteTest
{
    /**
     * Demonstrates the HBase delete paths: deleting a whole row, one cell,
     * an entire column family, a batched List of Deletes, and an atomic
     * checkAndDelete.
     *
     * @param args unused
     * @throws IOException on any communication failure with the cluster
     */
    public static void main(String[] args) throws IOException
    {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try
        {
            // Delete a whole row.
            Delete delete1 = new Delete(Bytes.toBytes("row3"));
            table.delete(delete1);

            // Delete a single cell.
            Delete delete2 = new Delete(Bytes.toBytes("row2"));
            delete2.deleteColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"));
            table.delete(delete2);

            // Delete the entire column family for this row.
            Delete delete3 = new Delete(Bytes.toBytes("row2"));
            delete3.deleteFamily(Bytes.toBytes("colfam1"));
            // BUG FIX: the original built delete3 but never submitted it,
            // so the family delete was silently skipped.
            table.delete(delete3);

            // Batched deletes in one round trip.
            List<Delete> deletes = new ArrayList<Delete>();
            Delete delete4 = new Delete(Bytes.toBytes("row1"));
            Delete delete5 = new Delete(Bytes.toBytes("row1"));
            delete4.deleteColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"));
            delete5.deleteColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col2"));
            deletes.add(delete5);
            deletes.add(delete4);
            table.delete(deletes);

            // Atomic check-and-delete: the delete is applied only when
            // colfam1:col5 currently holds "val2".
            Delete delete6 = new Delete(Bytes.toBytes("row1"));
            delete6.deleteColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col5"));
            boolean result = table.checkAndDelete(Bytes.toBytes("row1"), Bytes.toBytes("colfam1"),
                    Bytes.toBytes("col5"), Bytes.toBytes("val2"), delete6);
            System.out.println("result = " + result);
        }
        finally
        {
            // Original leaked the table; always release client resources.
            table.close();
        }
    }
}
4、批量操作:
package hbaseTest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.util.Bytes;
public class BatchTest
{
    /**
     * Demonstrates HTable.batch(): mixing Put, Get and Delete operations (all
     * implement Row) in one server round trip. The results array receives one
     * entry per operation, in request order, even when some operations fail.
     *
     * @param args unused
     * @throws IOException on failure to connect to the cluster
     */
    public static void main(String[] args) throws IOException
    {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try
        {
            List<Row> batch = new ArrayList<Row>();
            Put put = new Put(Bytes.toBytes("row1"));
            put.add(Bytes.toBytes("colfam1"), Bytes.toBytes("col1"), Bytes.toBytes("val8"));
            batch.add(put);
            Get get = new Get(Bytes.toBytes("row1"));
            get.addColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col2"));
            batch.add(get);
            Delete delete = new Delete(Bytes.toBytes("row1"));
            delete.deleteColumn(Bytes.toBytes("colfam1"), Bytes.toBytes("col5"));
            batch.add(delete);

            Object[] results = new Object[batch.size()];
            try
            {
                // batch() declares both IOException and InterruptedException;
                // partial results are still filled in on failure.
                table.batch(batch, results);
            }
            catch (Exception e)
            {
                System.err.println("Error: " + e);
            }
            for (int i = 0; i < batch.size(); i++)
            {
                System.out.println("results[" + i + "]" + results[i]);
            }
        }
        finally
        {
            // Original leaked the table; always release client resources.
            table.close();
        }
    }
}
5、扫描表(从某行到某行,不是整张表扫描):
package hbaseTest;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
public class ScanTest
{
    /**
     * Demonstrates a ranged scan: rows from "row1" (inclusive) up to "row6"
     * (exclusive) rather than a full-table scan.
     *
     * @param args unused
     * @throws IOException on any communication failure with the cluster
     */
    public static void main(String[] args) throws IOException
    {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "testtable");
        try
        {
            // Start row is inclusive, stop row is exclusive.
            Scan scan1 = new Scan(Bytes.toBytes("row1"), Bytes.toBytes("row6"));
            ResultScanner scanner = table.getScanner(scan1);
            try
            {
                for (Result rst : scanner)
                {
                    System.out.println(rst);
                }
            }
            finally
            {
                // Original closed the scanner only on the happy path;
                // an exception mid-iteration would leak a server-side scanner.
                scanner.close();
            }
        }
        finally
        {
            // Original leaked the table; always release client resources.
            table.close();
        }
    }
}