Java Scan.setStartRow Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hbase.client.Scan.setStartRow. If you are wondering what Scan.setStartRow does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.hbase.client.Scan.

A total of 20 code examples of Scan.setStartRow are shown below, sorted by popularity by default. Upvoting the examples you like or find useful helps the system recommend better Java code samples.
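
To set the stage, here is a minimal, self-contained sketch of the start-row/stop-row scan pattern that the examples below revolve around. The table name "my_table", the column family "cf", and the row keys "row-0005"/"row-0010" are placeholders chosen for illustration only; they do not come from any of the examples.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanStartRowSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Placeholder table and column family; adjust to your own schema.
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("my_table"))) {
      Scan scan = new Scan();
      scan.addFamily(Bytes.toBytes("cf"));
      scan.setStartRow(Bytes.toBytes("row-0005")); // inclusive lower bound of the row range
      scan.setStopRow(Bytes.toBytes("row-0010"));  // exclusive upper bound of the row range
      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println(Bytes.toStringBinary(result.getRow()));
        }
      }
    }
  }
}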

Example 1: testAvgWithInvalidRange
Upvotes: 3

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testAvgWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[1]);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  Double avg = null;
  try {
    avg = aClient.avg(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, avg); // control should go to the catch block
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 17

Example 2: testMinWithInvalidRange2
Upvotes: 3

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testMinWithInvalidRange2() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[6]);
  final ColumnInterpreter ci = new DoubleColumnInterpreter();
  Double min = null;
  try {
    min = aClient.min(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, min); // control should go to the catch block
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 17

Example 3: testAvgWithInvalidRange
Upvotes: 3

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testAvgWithInvalidRange() {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[1]);
  final ColumnInterpreter ci = new BigDecimalColumnInterpreter();
  Double avg = null;
  try {
    avg = aClient.avg(TEST_TABLE, ci, scan);
  } catch (Throwable e) {
  }
  assertEquals(null, avg); // control should go to the catch block
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 17

Example 4: testMaxWithInvalidRange2
Upvotes: 3

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testMaxWithInvalidRange2() throws Throwable {
  BigDecimal max = new BigDecimal(Long.MIN_VALUE);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[4]);
  scan.setStopRow(ROWS[4]);
  try {
    AggregationClient aClient = new AggregationClient(conf);
    final ColumnInterpreter ci = new BigDecimalColumnInterpreter();
    max = aClient.max(TEST_TABLE, ci, scan);
  } catch (Exception e) {
    max = BigDecimal.ZERO;
  }
  assertEquals(BigDecimal.ZERO, max); // control should go to the catch block
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 18

Example 5: findStartNode
Upvotes: 3

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

private static CINode findStartNode(Table table, byte[] startKey) throws IOException {
  Scan scan = new Scan();
  scan.setStartRow(startKey);
  scan.setBatch(1);
  scan.addColumn(FAMILY_NAME, COLUMN_PREV);
  long t1 = System.currentTimeMillis();
  ResultScanner scanner = table.getScanner(scan);
  Result result = scanner.next();
  long t2 = System.currentTimeMillis();
  scanner.close();
  if (result != null) {
    CINode node = getCINode(result, new CINode());
    System.out.printf("FSR %d %s\n", t2 - t1, Bytes.toStringBinary(node.key));
    return node;
  }
  System.out.println("FSR " + (t2 - t1));
  return null;
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 23

Example 6: testMinWithValidRange
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

/**
 * @throws Throwable
 */
@Test(timeout = 300000)
public void testMinWithValidRange() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(HConstants.EMPTY_START_ROW);
  scan.setStopRow(HConstants.EMPTY_END_ROW);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  Long min = aClient.min(TEST_TABLE, ci, scan);
  assertEquals(0L, min.longValue());
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 17

Example 7: testStdWithValidRange2WithNoCQ
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testStdWithValidRange2WithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[7]);
  final ColumnInterpreter ci = new BigDecimalColumnInterpreter();
  double std = aClient.std(TEST_TABLE, ci, scan);
  System.out.println("std is:" + std);
  assertEquals(0, std, 0.05d);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 14

Example 8: testSumWithValidRange2
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

/**
 * @throws Throwable
 */
@Test(timeout = 300000)
public void testSumWithValidRange2() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[15]);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  long sum = aClient.sum(TEST_TABLE, ci, scan);
  assertEquals(95, sum);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 16

Example 9: testMaxWithValidRange2WithNoCQ
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testMaxWithValidRange2WithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[7]);
  final ColumnInterpreter ci = new BigDecimalColumnInterpreter();
  BigDecimal max = aClient.max(TEST_TABLE, ci, scan);
  assertEquals(new BigDecimal("6.00"), max);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 13

Example 10: getRowOrBefore
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

private Result getRowOrBefore(byte[] tableName, byte[] row, byte[] family) throws IOException {
  Scan scan = new Scan(row);
  scan.setReversed(true);
  scan.addFamily(family);
  scan.setStartRow(row);
  Table table = getTable(tableName);
  try (ResultScanner scanner = table.getScanner(scan)) {
    return scanner.next();
  } finally {
    if (table != null) {
      table.close();
    }
  }
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 15

Example 11: testMaxWithValidRange2WithNoCQ
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testMaxWithValidRange2WithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[7]);
  final ColumnInterpreter ci = new DoubleColumnInterpreter();
  double max = aClient.max(TEST_TABLE, ci, scan);
  assertEquals(6.00, max, 0.00);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 13

Example 12: testMinWithValidRange2
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

/**
 * @throws Throwable
 */
@Test(timeout = 300000)
public void testMinWithValidRange2() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[15]);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  long min = aClient.min(TEST_TABLE, ci, scan);
  assertEquals(5, min);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 16

Example 13: testMaxWithValidRange2
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

/**
 * @throws Throwable
 */
@Test(timeout = 300000)
public void testMaxWithValidRange2() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[15]);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  long max = aClient.max(TEST_TABLE, ci, scan);
  assertEquals(14, max);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 16

Example 14: testAvgWithValidRange2
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

/**
 * @throws Throwable
 */
@Test(timeout = 300000)
public void testAvgWithValidRange2() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[15]);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  double avg = aClient.avg(TEST_TABLE, ci, scan);
  assertEquals(9.5, avg, 0);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 16

Example 15: testStdWithValidRange2WithNoCQ
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testStdWithValidRange2WithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[7]);
  final ColumnInterpreter ci = new DoubleColumnInterpreter();
  double std = aClient.std(TEST_TABLE, ci, scan);
  System.out.println("std is:" + std);
  assertEquals(0, std, 0.05d);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 14

Example 16: testMinWithValidRange2WithNoCQ
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Test(timeout = 300000)
public void testMinWithValidRange2WithNoCQ() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addFamily(TEST_FAMILY);
  scan.setStartRow(ROWS[6]);
  scan.setStopRow(ROWS[7]);
  final ColumnInterpreter ci = new LongColumnInterpreter();
  long min = aClient.min(TEST_TABLE, ci, scan);
  assertEquals(6, min);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 13

Example 17: createGCScanner
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

private GCScanner createGCScanner(ScanRange selectedRange) throws IOException {
  List list = new ArrayList<>(rangeList.getRanges());
  list.remove(selectedRange);
  Scan scan = new Scan();
  scan.setStartRow(selectedRange.getStart());
  scan.setStopRow(selectedRange.getStop());
  scan.setCaching(rawScan.getCaching());
  scan.setCacheBlocks(rawScan.getCacheBlocks());
  scan.setFilter(new ScanRange.ScanRangeList(list).toFilterList());
  Table table = conn.getTable(
      relation.getIndexTableName(selectedRange.getFamily(), selectedRange.getQualifier()));
  ResultScanner scanner = table.getScanner(scan);
  return new GCScanner(this, scanner, selectedRange.getFamily(), selectedRange.getQualifier());
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 15

Example 18: testSumWithValidRange2
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

/**
 * @throws Throwable
 */
@Test(timeout = 300000)
public void testSumWithValidRange2() throws Throwable {
  AggregationClient aClient = new AggregationClient(conf);
  Scan scan = new Scan();
  scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
  scan.setStartRow(ROWS[5]);
  scan.setStopRow(ROWS[15]);
  final ColumnInterpreter ci = new BigDecimalColumnInterpreter();
  BigDecimal sum = aClient.sum(TEST_TABLE, ci, scan);
  assertEquals(new BigDecimal("95.00"), sum);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 16

Example 19: testScan
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

private void testScan(final int[] columnArr, final boolean lazySeekEnabled,
    final int startRow, final int endRow, int maxVersions) throws IOException {
  StoreScanner.enableLazySeekGlobally(lazySeekEnabled);
  final Scan scan = new Scan();
  final Set qualSet = new HashSet();
  for (int iColumn : columnArr) {
    String qualStr = getQualStr(iColumn);
    scan.addColumn(FAMILY_BYTES, Bytes.toBytes(qualStr));
    qualSet.add(qualStr);
  }
  scan.setMaxVersions(maxVersions);
  scan.setStartRow(rowBytes(startRow));
  // Adjust for the fact that for multi-row queries the end row is exclusive.
  {
    final byte[] scannerStopRow =
        rowBytes(endRow + (startRow != endRow ? 1 : 0));
    scan.setStopRow(scannerStopRow);
  }
  final long initialSeekCount = StoreFileScanner.getSeekCount();
  final InternalScanner scanner = region.getScanner(scan);
  final List results = new ArrayList();
  final List actualKVs = new ArrayList();

  // Such a clumsy do-while loop appears to be the official way to use an
  // internalScanner. scanner.next() return value refers to the _next_
  // result, not to the one already returned in results.
  boolean hasNext;
  do {
    hasNext = scanner.next(results);
    actualKVs.addAll(results);
    results.clear();
  } while (hasNext);

  List filteredKVs = filterExpectedResults(qualSet,
      rowBytes(startRow), rowBytes(endRow), maxVersions);
  final String rowRestrictionStr =
      (startRow == -1 && endRow == -1) ? "all rows" : (
          startRow == endRow ? ("row=" + startRow) : ("startRow="
              + startRow + ", " + "endRow=" + endRow));
  final String columnRestrictionStr =
      columnArr.length == 0 ? "all columns"
          : ("columns=" + Arrays.toString(columnArr));
  final String testDesc =
      "Bloom=" + bloomType + ", compr=" + comprAlgo + ", "
          + (scan.isGetScan() ? "Get" : "Scan") + ": "
          + columnRestrictionStr + ", " + rowRestrictionStr
          + ", maxVersions=" + maxVersions + ", lazySeek=" + lazySeekEnabled;
  long seekCount = StoreFileScanner.getSeekCount() - initialSeekCount;
  if (VERBOSE) {
    System.err.println("Seek count: " + seekCount + ", KVs returned: "
        + actualKVs.size() + ". " + testDesc +
        (lazySeekEnabled ? "\n" : ""));
  }
  if (lazySeekEnabled) {
    totalSeekLazy += seekCount;
  } else {
    totalSeekDiligent += seekCount;
  }
  assertKVListsEqual(testDesc, filteredKVs, actualKVs);
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 64

Example 20: run
Upvotes: 2

import org.apache.hadoop.hbase.client.Scan; // the package/class this method depends on

@Override
public int run(String[] args) throws Exception {
  Options options = new Options();
  options.addOption("s", "start", true, "start key");
  options.addOption("e", "end", true, "end key");
  options.addOption("l", "limit", true, "number to print");
  GnuParser parser = new GnuParser();
  CommandLine cmd = null;
  try {
    cmd = parser.parse(options, args);
    if (cmd.getArgs().length != 0) {
      throw new ParseException("Command takes no arguments");
    }
  } catch (ParseException e) {
    System.err.println("Failed to parse command line " + e.getMessage());
    System.err.println();
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(getClass().getSimpleName(), options);
    System.exit(-1);
  }

  Table table = new HTable(getConf(), getTableName(getConf()));
  Scan scan = new Scan();
  scan.setBatch(10000);
  if (cmd.hasOption("s"))
    scan.setStartRow(Bytes.toBytesBinary(cmd.getOptionValue("s")));
  if (cmd.hasOption("e"))
    scan.setStopRow(Bytes.toBytesBinary(cmd.getOptionValue("e")));
  int limit = 0;
  if (cmd.hasOption("l"))
    limit = Integer.parseInt(cmd.getOptionValue("l"));
  else
    limit = 100;

  ResultScanner scanner = table.getScanner(scan);
  CINode node = new CINode();
  Result result = scanner.next();
  int count = 0;
  while (result != null && count++ < limit) {
    node = getCINode(result, node);
    System.out.printf("%s:%s:%012d:%s\n", Bytes.toStringBinary(node.key),
        Bytes.toStringBinary(node.prev), node.count, node.client);
    result = scanner.next();
  }
  scanner.close();
  table.close();
  return 0;
}

Developer ID: fengchen8086, Project: ditb, Lines of code: 56
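
Version note: the examples above target the HBase 1.x client API used by the ditb project. In HBase 2.x, Scan.setStartRow and Scan.setStopRow are deprecated in favor of the chainable withStartRow/withStopRow methods, which also let you choose whether each bound is inclusive. Below is a minimal sketch of the newer form, again with placeholder row keys rather than values taken from the examples.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class WithStartRowSketch {
  // Builds a scan bounded by [row-0005, row-0010) using the HBase 2.x style API.
  static Scan boundedScan() {
    return new Scan()
        .withStartRow(Bytes.toBytes("row-0005"), true)   // inclusive start row
        .withStopRow(Bytes.toBytes("row-0010"), false);  // exclusive stop row
  }
}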

Note: the org.apache.hadoop.hbase.client.Scan.setStartRow examples in this article were compiled from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by their respective developers, and the copyright remains with the original authors. Please consult each project's license before using or distributing the code, and do not republish it without permission.
