import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.util.Bytes;
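// Demonstrates HTable.batch(): a Get, a Delete and a Put are sent to "testtable" in one
// round trip; the result array holds one entry per action, in the same order as the list.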
public class BatchExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
List<Row> rowList=new ArrayList<Row>();
Get get=new Get(Bytes.toBytes("row7"));
get.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("column7"));
rowList.add(get);
Delete delete =new Delete(Bytes.toBytes("row1"));
rowList.add(delete);
Put put=new Put(Bytes.toBytes("row9"));
put.add(Bytes.toBytes("family1"), Bytes.toBytes("column6"), Bytes.toBytes("xxx"));
rowList.add(put);
Object[] result=new Object[rowList.size()];
table.batch(rowList, result);
for(Object ret:result){
System.out.println(ret);
}
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
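// Demonstrates an atomic checkAndDelete: row6 is deleted only if family1:column11
// currently holds the value "value8"; the boolean result reports whether the check passed.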
public class CheckAndDeleteExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Delete delete=new Delete(Bytes.toBytes("row6"));
boolean ret=table.checkAndDelete(Bytes.toBytes("row6"), Bytes.toBytes("family1"), Bytes.toBytes("column11"),Bytes.toBytes("value8"), delete);
table.close();
System.out.println(ret);
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
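// Demonstrates an atomic checkAndPut: the Put is applied only if family1:column6 of row6
// does not exist yet (the expected value is null).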
public class CheckAndSetExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Put put1=new Put(Bytes.toBytes("row6"));
put1.add(Bytes.toBytes("family1"), Bytes.toBytes("column11"), Bytes.toBytes("value8"));
boolean ret=table.checkAndPut(Bytes.toBytes("row6"), Bytes.toBytes("family1"), Bytes.toBytes("column6"), null, put1);
System.out.println(ret);
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
import org.apache.hadoop.hbase.util.Bytes;
// ColumnCountGetFilter(2): a Get on row6 returns at most the first two columns of the row.
public class ColumnCountFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Get get=new Get(Bytes.toBytes("row6"));
ColumnCountGetFilter filter=new ColumnCountGetFilter(2);
get.setFilter(filter);
Result result=table.get(get);
System.out.println(result);
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
import org.apache.hadoop.hbase.util.Bytes;
// ColumnPaginationFilter(limit=1, offset=2): each row skips its first two columns and returns at most one column.
public class ColumnPageFilterExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
ColumnPaginationFilter filter=new ColumnPaginationFilter(1, 2);
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"-----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;
// ColumnPrefixFilter: the scan only returns columns whose qualifier starts with "column6".
public class ColumnPrefixFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
ColumnPrefixFilter filter=new ColumnPrefixFilter(Bytes.toBytes("column6"));
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
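// Demonstrates a single Delete: removes the version of family1:column2 in row1 that has timestamp 1.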
public class DeleteExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Delete delete=new Delete(Bytes.toBytes("row1"));
delete.deleteColumn(Bytes.toBytes("family1"), Bytes.toBytes("column2"),1);
table.delete(delete);
table.close();
}
}
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
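// Demonstrates a batched delete: row3 is removed completely, while only family1:column5 is removed from row4.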
public class DeleteListExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
List<Delete> deleteList=new ArrayList<Delete>();
Delete delete=new Delete(Bytes.toBytes("row3"));
deleteList.add(delete);
Delete delete2=new Delete(Bytes.toBytes("row4"));
delete2.deleteColumn(Bytes.toBytes("family1"), Bytes.toBytes("column5"));
deleteList.add(delete2);
table.delete(deleteList);
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.DependentColumnFilter;
import org.apache.hadoop.hbase.util.Bytes;
// DependentColumnFilter: family1:column6 is the reference column; only cells whose timestamp matches the
// reference cell are returned, optionally dropping the reference column itself and/or checking its value.
public class DependentColumnFilterExample {
public static void filter(boolean drop,CompareOp op,ByteArrayComparable comparable)throws Exception{
Configuration conf= HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
DependentColumnFilter filter ;
if(comparable==null){
filter=new DependentColumnFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), drop);
}else{
filter=new DependentColumnFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), drop,op,comparable);
}
Scan scan=new Scan();
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"-------"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
table.close();
System.out.println("-----------------");
System.out.println("@@@@@@@@@@@@@@@@@@@@@@@");
}
/**
* @param args
*/
public static void main(String[] args)throws Exception {
filter(false,null,null);
filter(true,null,null);
filter(false,CompareOp.EQUAL,new BinaryPrefixComparator(Bytes.toBytes("value6")));
filter(true,CompareOp.EQUAL,new BinaryPrefixComparator(Bytes.toBytes("value6")));
}
}
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
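// Demonstrates error handling for a batched put: put2 targets a non-existent column family and put3
// contains no cells, so the batched put fails; the catch block flushes whatever was buffered successfully.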
public class ErrorPutExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,Bytes.toBytes("testtable"));
List<Put> putList=new ArrayList<Put>();
Put put1=new Put(Bytes.toBytes("row9"));
put1.add(Bytes.toBytes("family1"), Bytes.toBytes("column6"), Bytes.toBytes("value6"));
putList.add(put1);
Put put2=new Put(Bytes.toBytes("row9"));
put2.add(Bytes.toBytes("errorfamily"), Bytes.toBytes("column7"), Bytes.toBytes("value7"));
putList.add(put2);
Put put3=new Put(Bytes.toBytes("row9"));
putList.add(put3);
try{
table.put(putList);
}catch(Exception e){
e.printStackTrace();
// try to flush the puts that were buffered before the failure
table.flushCommits();
}finally{
table.close();
}
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.util.Bytes;
// FamilyFilter: only cells whose column family sorts before "family2" pass; applied to both a Scan and a Get.
public class FamilyFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
FamilyFilter familyFilter=new FamilyFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("family2")));
scan.setFilter(familyFilter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
System.out.println("-----------");
Get get=new Get(Bytes.toBytes("row7"));
get.setFilter(familyFilter);
Result result=table.get(get);
System.out.println(result);
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;
// FirstKeyOnlyFilter: the scan returns only the first KeyValue of each row, useful e.g. for row counting.
public class FirstKeyOnlyFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
FirstKeyOnlyFilter filter=new FirstKeyOnlyFilter();
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"-----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
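// Demonstrates the client-side write buffer: with auto-flush disabled, puts are buffered locally
// and are not visible to a Get until flushCommits() sends them to the server.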
public class FlushExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable htable=new HTable(conf,"testtable");
System.out.println("auto flush:"+htable.isAutoFlush());
htable.setAutoFlush(false);
Put put1=new Put(Bytes.toBytes("row2"));
put1.add(Bytes.toBytes("family1"), Bytes.toBytes("column3"), Bytes.toBytes("value3"));
htable.put(put1);
Put put2=new Put(Bytes.toBytes("row3"));
put2.add(Bytes.toBytes("family1"), Bytes.toBytes("column4"), Bytes.toBytes("value4"));
htable.put(put2);
Put put3=new Put(Bytes.toBytes("row4"));
put3.add(Bytes.toBytes("family1"), Bytes.toBytes("column5"), Bytes.toBytes("value5"));
htable.put(put3);
Get get=new Get(Bytes.toBytes("row3"));
// the puts are still in the local write buffer, so this Get returns an empty Result
Result result=htable.get(get);
System.out.println(result);
htable.flushCommits();
// after the explicit flush the data is on the server and visible to the Get
result=htable.get(get);
System.out.println(result);
htable.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
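// Demonstrates a single Get: reads family1:column1 of row1 and prints its value.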
public class GetExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Get get=new Get(Bytes.toBytes("row1"));
get.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("column1"));
Result result=table.get(get);
byte[] value=result.getValue(Bytes.toBytes("family1"), Bytes.toBytes("column1"));
System.out.println(Bytes.toString(value));
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
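// Demonstrates getRowOrBefore(): returns the given row if it exists, otherwise the closest row
// that sorts before it, or null when no row sorts at or before the given key.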
public class GetRowOrBeforeExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Result result=table.getRowOrBefore(Bytes.toBytes("row1"), Bytes.toBytes("family1"));
System.out.println(Bytes.toString(result.getRow()));
Result result2=table.getRowOrBefore(Bytes.toBytes("row99"),Bytes.toBytes("family1"));
System.out.println(result2);
Result result3=table.getRowOrBefore(Bytes.toBytes("abc"), Bytes.toBytes("family1"));
System.out.println(result3);
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.InclusiveStopFilter;
import org.apache.hadoop.hbase.util.Bytes;
// InclusiveStopFilter: the scan starts at row7 and stops after row9, including the stop row
// (the normal stop row of a Scan is exclusive).
public class InclusiveStopFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
scan.setStartRow(Bytes.toBytes("row7"));
InclusiveStopFilter filter=new InclusiveStopFilter(Bytes.toBytes("row9"));
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"-----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;
// KeyOnlyFilter(true): values are stripped and replaced by their length, so essentially only the keys are transferred.
public class KeyOnlyFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
KeyOnlyFilter filter=new KeyOnlyFilter(true);
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"-----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
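// Demonstrates a batched Get: fetches row1 (family1 only) and row2 in one call and prints the matching columns.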
public class ListGetExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
List<Get> getList=new ArrayList<Get>();
Get get1=new Get(Bytes.toBytes("row1"));
get1.addFamily(Bytes.toBytes("family1"));
getList.add(get1);
Get get2=new Get(Bytes.toBytes("row2"));
getList.add(get2);
Result[] results=table.get(getList);
for(Result result:results){
if(result.containsColumn(Bytes.toBytes("family1"), Bytes.toBytes("column1"))){
System.out.println("row:"+Bytes.toString(result.getRow()));
System.out.println("value:"+Bytes.toString(result.getValue(Bytes.toBytes("family1"), Bytes.toBytes("column1"))));
}
if(result.containsColumn(Bytes.toBytes("family1"), Bytes.toBytes("column2"))){
System.out.println("row:"+Bytes.toString(result.getRow()));
System.out.println("value:"+Bytes.toString(result.getValue(Bytes.toBytes("family1"), Bytes.toBytes("column2"))));
}
}
for(Result result:results){
for(KeyValue keyValue:result.raw()){
System.out.println("row:"+Bytes.toString(keyValue.getRow()));
System.out.println("value:"+Bytes.toString(keyValue.getValue()));
}
}
}
}
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
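// Demonstrates a batched Put: writes three columns of row6 with a single table.put(List<Put>) call.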
public class ListPutExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
//HBaseAdmin admin=new HBaseAdmin(conf);
HTable table=new HTable(conf, "testtable");
List<Put> putList=new ArrayList<Put>();
Put put1=new Put(Bytes.toBytes("row6"));
put1.add(Bytes.toBytes("family1"), Bytes.toBytes("column6"), Bytes.toBytes("value6"));
putList.add(put1);
Put put2=new Put(Bytes.toBytes("row6"));
put2.add(Bytes.toBytes("family1"), Bytes.toBytes("column7"), Bytes.toBytes("value7"));
putList.add(put2);
Put put3=new Put(Bytes.toBytes("row6"));
put3.add(Bytes.toBytes("family1"), Bytes.toBytes("column8"), Bytes.toBytes("value8"));
putList.add(put3);
table.put(putList);
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PageFilter;
// PageFilter(1): limits the number of rows returned per region to one; a table with several
// regions can therefore still return more than one row in total.
public class PageFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
PageFilter filter=new PageFilter(1);
scan.setFilter(filter);
ResultScanner scanner=table.getScanner(scan);
for(Result result:scanner){
System.out.println(result);
}
scanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;
// PrefixFilter: the scan only returns rows whose key starts with "row6".
public class PrefixFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
PrefixFilter filter=new PrefixFilter(Bytes.toBytes("row6"));
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
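// Demonstrates a single Put: writes family1:column1 with the server-assigned timestamp
// and family1:column2 with an explicit timestamp of 1000.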
public class PutExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable htable=new HTable(conf, "testtable");
Put put=new Put(Bytes.toBytes("row1"));
put.add(Bytes.toBytes("family1"), Bytes.toBytes("column1"), Bytes.toBytes("value1"));
put.add(Bytes.toBytes("family1"), Bytes.toBytes("column2"), 1000, Bytes.toBytes("value2"));
htable.put(put);
htable.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;
// QualifierFilter: only columns whose qualifier sorts before "column9" pass; applied to both a Scan and a Get.
public class QualifierFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
QualifierFilter filter=new QualifierFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("column9")));
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
System.out.println("------------");
Get get=new Get(Bytes.toBytes("row7"));
get.setFilter(filter);
Result result=table.get(get);
System.out.println(result);
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.RandomRowFilter;
// RandomRowFilter(0.8f): each row is included with a probability of 80%, so the output differs between runs.
public class RandomRowFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
RandomRowFilter filter=new RandomRowFilter(0.8f);
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;
// RowFilter with three comparators: rows sorting before "row9" (BinaryComparator), rows matching
// a regular expression, and rows containing the substring "9".
public class RowFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf, "testtable");
Scan scan1=new Scan();
RowFilter rowFilter1=new RowFilter(CompareOp.LESS, new BinaryComparator(Bytes.toBytes("row9")));
scan1.setFilter(rowFilter1);
ResultScanner resultScanner=table.getScanner(scan1);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
System.out.println("-----------------");
Scan scan2=new Scan();
RowFilter rowFilter2=new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".*"));
scan2.setFilter(rowFilter2);
resultScanner=table.getScanner(scan2);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
System.out.println("-----------------");
Scan scan3=new Scan();
RowFilter rowFilter3=new RowFilter(CompareOp.EQUAL, new SubstringComparator("9"));
scan3.setFilter(rowFilter3);
resultScanner=table.getScanner(scan3);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
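// Demonstrates scanner caching and batching: caching sets how many rows are fetched per RPC,
// batch sets how many columns go into one Result, so the number of Results changes with the batch size.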
public class ScanCacheAndBatchExample {
public static void scan(int cache,int batch)throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
scan.setCaching(cache);
scan.setBatch(batch);
ResultScanner resultScanner=table.getScanner(scan);
int count=0;
for(Result result:resultScanner){
count++;
}
resultScanner.close();
table.close();
System.out.println("cache="+cache+", batch="+batch+", results="+count);
}
/**
* @param args
*/
public static void main(String[] args)throws Exception {
scan(2,1);
scan(1,2);
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
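// Demonstrates basic scans: one starting at row9 and one restricted to the column family1:column6.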
public class ScanExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan1=new Scan();
scan1.setStartRow(Bytes.toBytes("row9"));
ResultScanner resultScanner=table.getScanner(scan1);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
Scan scan2=new Scan();
scan2.addColumn(Bytes.toBytes("family1"), Bytes.toBytes("column6"));
resultScanner=table.getScanner(scan2);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
table.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;
// SingleColumnValueExcludeFilter: a row passes only if family1:column6 contains "value" (rows without
// that column are dropped), and the reference column itself is left out of the result.
public class SingleColumnValueExcludeFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
SingleColumnValueExcludeFilter filter=new SingleColumnValueExcludeFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), CompareOp.EQUAL,new SubstringComparator("value"));
filter.setFilterIfMissing(true);
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.util.Bytes;
// SingleColumnValueFilter: only rows whose family1:column6 value contains "x" are returned;
// rows missing that column are filtered out as well.
public class SingleColumnValueFilterExample {
/**
* @param args
*/
public static void main(String[] args)throws Exception {
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
SingleColumnValueFilter filter=new SingleColumnValueFilter(Bytes.toBytes("family1"), Bytes.toBytes("column6"), CompareOp.EQUAL,new SubstringComparator("x"));
filter.setFilterIfMissing(true);
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.SkipFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
// SkipFilter: the plain ValueFilter only drops cells equal to "value8"; wrapped in a SkipFilter,
// the whole row is dropped as soon as one of its cells fails the check.
public class SkipFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
ValueFilter valueFilter=new ValueFilter(CompareOp.NOT_EQUAL, new BinaryComparator(Bytes.toBytes("value8")));
Scan scan=new Scan();
scan.setFilter(valueFilter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
System.out.println("-------------");
SkipFilter skipFilter=new SkipFilter(valueFilter);
scan.setFilter(skipFilter);
resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.util.Bytes;
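// Demonstrates table administration with HBaseAdmin: createTable2() drops "testtable" if it exists
// and recreates it with the column family "family1". createTable1() builds a descriptor without any
// column family, which HBase rejects, so main() uses createTable2().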
public class TableExample {
private static void createTable1()throws Exception{
Configuration con=HBaseConfiguration.create();
HBaseAdmin admin=new HBaseAdmin(con);
HTableDescriptor desc=new HTableDescriptor("testtable");
admin.createTable(desc);
admin.close();
}
private static void createTable2()throws Exception{
Configuration con=HBaseConfiguration.create();
HBaseAdmin admin=new HBaseAdmin(con);
if(admin.tableExists("testtable")){
admin.disableTable("testtable");
admin.deleteTable("testtable");
}
HTableDescriptor desc=new HTableDescriptor("testtable");
HColumnDescriptor colDesc=new HColumnDescriptor(Bytes.toBytes("family1"));
desc.addFamily(colDesc);
admin.createTable(desc);
admin.close();
}
/**
* @param args
*/
public static void main(String[] args)throws Exception {
createTable2();
}
}
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.TimestampsFilter;
import org.apache.hadoop.hbase.util.Bytes;
// TimestampsFilter: only cells whose timestamp is exactly 1393152694859 or 1 are returned.
public class TimeStampsFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
List<Long> timeList=new ArrayList<Long>();
timeList.add(1393152694859L);
timeList.add(1L);
TimestampsFilter filter=new TimestampsFilter(timeList);
//scan.setTimeRange(2, 6);
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue kv:result.raw()){
System.out.println(kv+"-----"+Bytes.toString(kv.getValue()));
}
}
resultScanner.close();
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
// ValueFilter with a RegexStringComparator: only cells whose value matches "xxx" are returned;
// applied to both a Scan and a Get.
public class ValueFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
Scan scan=new Scan();
ValueFilter filter=new ValueFilter(CompareOp.EQUAL, new RegexStringComparator("xxx"));
scan.setFilter(filter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
for(KeyValue keyValue:result.raw()){
System.out.println(Bytes.toString(keyValue.getValue()));
}
}
resultScanner.close();
System.out.println("-------------");
Get get=new Get(Bytes.toBytes("row9"));
get.setFilter(filter);
Result result=table.get(get);
for(KeyValue kv:result.raw()){
System.out.println(Bytes.toString(kv.getValue()));
}
}
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.util.Bytes;
// WhileMatchFilter: the plain RowFilter keeps only row7, while the WhileMatchFilter wrapper ends the
// whole scan the first time the wrapped filter rejects a row, so the second scan typically returns nothing.
public class WhileMatchFilterExample {
/**
* @param args
*/
public static void main(String[] args) throws Exception{
Configuration conf=HBaseConfiguration.create();
HTable table=new HTable(conf,"testtable");
RowFilter rowFilter=new RowFilter(CompareOp.EQUAL,new BinaryComparator(Bytes.toBytes("row7")) );
Scan scan=new Scan();
scan.setFilter(rowFilter);
ResultScanner resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
System.out.println("----------------------");
WhileMatchFilter matchFilter=new WhileMatchFilter(rowFilter);
scan.setFilter(matchFilter);
resultScanner=table.getScanner(scan);
for(Result result:resultScanner){
System.out.println(result);
}
resultScanner.close();
}
}