/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.client;
19
20 import static org.apache.hadoop.hbase.client.ConnectionUtils.createClosestRowAfter;
21 import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStartRow;
22 import static org.apache.hadoop.hbase.client.ConnectionUtils.noMoreResultsForScan;
23
24 import java.io.IOException;
25 import java.util.concurrent.ExecutorService;
26
27 import org.apache.hadoop.conf.Configuration;
28 import org.apache.hadoop.hbase.TableName;
29 import org.apache.hadoop.hbase.classification.InterfaceAudience;
30 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
31
32
33
34
35
36 @InterfaceAudience.Private
37 public class ClientSimpleScanner extends ClientScanner {
38 public ClientSimpleScanner(Configuration configuration, Scan scan, TableName name,
39 ClusterConnection connection, RpcRetryingCallerFactory rpcCallerFactory,
40 RpcControllerFactory rpcControllerFactory, ExecutorService pool,
41 int replicaCallTimeoutMicroSecondScan) throws IOException {
42 super(configuration, scan, name, connection, rpcCallerFactory, rpcControllerFactory, pool,
43 replicaCallTimeoutMicroSecondScan);
44 }
45
46 @Override
47 protected boolean setNewStartKey() {
48 if (noMoreResultsForScan(scan, currentRegion)) {
49 return false;
50 }
51 scan.withStartRow(currentRegion.getEndKey(), true);
52 return true;
53 }
54
55 @Override
56 protected ScannerCallable createScannerCallable() {
57 if (!scan.includeStartRow() && !isEmptyStartRow(scan.getStartRow())) {
58
59
60 scan.withStartRow(createClosestRowAfter(scan.getStartRow()), true);
61 }
62 return new ScannerCallable(getConnection(), getTable(), scan, this.scanMetrics,
63 this.rpcControllerFactory);
64 }
65 }