/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.mapreduce;
19
20 import static org.junit.Assert.assertEquals;
21
22 import java.io.IOException;
23 import java.util.ArrayList;
24 import java.util.List;
25
26 import org.apache.hadoop.conf.Configuration;
27 import org.apache.hadoop.hbase.HBaseTestingUtility;
28 import org.apache.hadoop.hbase.HConstants;
29 import org.apache.hadoop.hbase.HTestConst;
30 import org.apache.hadoop.hbase.KeyValue;
31 import org.apache.hadoop.hbase.TableName;
32 import org.apache.hadoop.hbase.client.Connection;
33 import org.apache.hadoop.hbase.client.ConnectionFactory;
34 import org.apache.hadoop.hbase.client.Put;
35 import org.apache.hadoop.hbase.client.Scan;
36 import org.apache.hadoop.hbase.client.Table;
37 import org.apache.hadoop.hbase.regionserver.StoreScanner;
38 import org.apache.hadoop.hbase.testclassification.MediumTests;
39 import org.apache.hadoop.hbase.util.Bytes;
40 import org.junit.AfterClass;
41 import org.junit.BeforeClass;
42 import org.junit.Test;
43 import org.junit.experimental.categories.Category;
44
45 @Category(MediumTests.class)
46 public class TestTableRecordReader {
47 private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
48
49 private static TableName TABLE_NAME = TableName.valueOf("TestTableRecordReader");
50
51 private static int NUM_ROWS = 5;
52 private static byte[] ROW = Bytes.toBytes("testRow");
53 private static byte[][] ROWS = HTestConst.makeNAscii(ROW, NUM_ROWS);
54
55 private static int NUM_FAMILIES = 2;
56 private static byte[] FAMILY = Bytes.toBytes("testFamily");
57 private static byte[][] FAMILIES = HTestConst.makeNAscii(FAMILY, NUM_FAMILIES);
58
59 private static int NUM_QUALIFIERS = 2;
60 private static byte[] QUALIFIER = Bytes.toBytes("testQualifier");
61 private static byte[][] QUALIFIERS = HTestConst.makeNAscii(QUALIFIER, NUM_QUALIFIERS);
62
63 private static int VALUE_SIZE = 10;
64 private static byte[] VALUE = Bytes.createMaxByteArray(VALUE_SIZE);
65
66 private static final int TIMEOUT = 4000;
67
68 @BeforeClass
69 public static void setUpBeforeClass() throws Exception {
70 Configuration conf = TEST_UTIL.getConfiguration();
71
72 conf.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, TIMEOUT);
73 conf.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, TIMEOUT);
74
75
76 conf.setLong(StoreScanner.HBASE_CELLS_SCANNED_PER_HEARTBEAT_CHECK, 1);
77 TEST_UTIL.startMiniCluster(1);
78
79 createTestTable(TABLE_NAME, ROWS, FAMILIES, QUALIFIERS, VALUE);
80 }
81
82 private static void createTestTable(TableName name, byte[][] rows, byte[][] families,
83 byte[][] qualifiers, byte[] cellValue) throws IOException {
84 TEST_UTIL.createTable(name, families).put(createPuts(rows, families, qualifiers, cellValue));
85 }
86
87 private static List<Put> createPuts(byte[][] rows, byte[][] families, byte[][] qualifiers,
88 byte[] value) throws IOException {
89 List<Put> puts = new ArrayList<>();
90 for (int row = 0; row < rows.length; row++) {
91 Put put = new Put(rows[row]);
92 for (int fam = 0; fam < families.length; fam++) {
93 for (int qual = 0; qual < qualifiers.length; qual++) {
94 KeyValue kv = new KeyValue(rows[row], families[fam], qualifiers[qual], qual, value);
95 put.add(kv);
96 }
97 }
98 puts.add(put);
99 }
100 return puts;
101 }
102
103 @AfterClass
104 public static void tearDownAfterClass() throws Exception {
105 TEST_UTIL.shutdownMiniCluster();
106 }
107
108 @Test
109 public void test() throws Exception {
110 try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration());
111 Table table = conn.getTable(TABLE_NAME)) {
112 org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl trr =
113 new org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl();
114 Scan scan =
115 new Scan().setMaxResultSize(1).setCaching(Integer.MAX_VALUE).setNeedCursorResult(true);
116 trr.setScan(scan);
117 trr.setHTable(table);
118 trr.initialize(null, null);
119 int num = 0;
120 while (trr.nextKeyValue()) {
121 num++;
122 }
123 assertEquals(NUM_ROWS * NUM_FAMILIES * NUM_QUALIFIERS, num);
124 }
125 }
126 }