1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.mapreduce;
20
21 import static org.junit.Assert.assertArrayEquals;
22 import static org.junit.Assert.assertEquals;
23 import static org.junit.Assert.assertTrue;
24 import static org.junit.Assert.fail;
25 import java.io.IOException;
26 import java.util.Locale;
27 import java.util.TreeMap;
28 import org.apache.hadoop.conf.Configuration;
29 import org.apache.hadoop.fs.FSDataOutputStream;
30 import org.apache.hadoop.fs.FileStatus;
31 import org.apache.hadoop.fs.FileSystem;
32 import org.apache.hadoop.fs.Path;
33 import org.apache.hadoop.hbase.HBaseTestingUtility;
34 import org.apache.hadoop.hbase.HColumnDescriptor;
35 import org.apache.hadoop.hbase.HConstants;
36 import org.apache.hadoop.hbase.HTableDescriptor;
37 import org.apache.hadoop.hbase.NamespaceDescriptor;
38 import org.apache.hadoop.hbase.TableName;
39 import org.apache.hadoop.hbase.TableNotFoundException;
40 import org.apache.hadoop.hbase.client.Connection;
41 import org.apache.hadoop.hbase.client.ConnectionFactory;
42 import org.apache.hadoop.hbase.client.HTable;
43 import org.apache.hadoop.hbase.client.Table;
44 import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
45 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
46 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
47 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
48 import org.apache.hadoop.hbase.io.hfile.HFile;
49 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
50 import org.apache.hadoop.hbase.regionserver.BloomType;
51 import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
52 import org.apache.hadoop.hbase.testclassification.LargeTests;
53 import org.apache.hadoop.hbase.util.Bytes;
54 import org.apache.hadoop.hbase.util.HFileTestUtil;
55 import org.junit.AfterClass;
56 import org.junit.BeforeClass;
57 import org.junit.Rule;
58 import org.junit.Test;
59 import org.junit.experimental.categories.Category;
60 import org.junit.rules.TestName;
61
62
63
64
65
66
67 @Category(LargeTests.class)
68 public class TestLoadIncrementalHFiles {
  // JUnit rule exposing the current test method's name (used to derive table
  // and directory names in several tests below).
  @Rule
  public TestName tn = new TestName();

  // Column qualifier and family written into every generated HFile.
  private static final byte[] QUALIFIER = Bytes.toBytes("myqual");
  private static final byte[] FAMILY = Bytes.toBytes("myfam");
  // Namespace created in setUpBeforeClass and used by the namespaced runTest pass.
  private static final String NAMESPACE = "bulkNS";

  // Substring expected in the IOException raised when bulk loading into a table
  // whose families do not match the HFile directories.
  static final String EXPECTED_MSG_FOR_NON_EXISTING_FAMILY = "Unmatched family names found";
  // Cap installed via LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY in
  // setUpBeforeClass; exceeded deliberately by testLoadTooMayHFiles.
  static final int MAX_FILES_PER_REGION_PER_FAMILY = 4;

  // Split points for tests that pre-create a multi-region table.
  private static final byte[][] SPLIT_KEYS = new byte[][] {
    Bytes.toBytes("ddd"),
    Bytes.toBytes("ppp")
  };

  // Shared mini-cluster harness; started once for the whole class.
  static HBaseTestingUtility util = new HBaseTestingUtility();
85
86 @BeforeClass
87 public static void setUpBeforeClass() throws Exception {
88 util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,"");
89 util.getConfiguration().setInt(
90 LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY,
91 MAX_FILES_PER_REGION_PER_FAMILY);
92
93 util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
94 KeyValueCodecWithTags.class.getCanonicalName());
95 util.startMiniCluster();
96
97 setupNamespace();
98 }
99
  /** Creates the {@code bulkNS} namespace used by the namespaced runTest pass. */
  protected static void setupNamespace() throws Exception {
    util.getHBaseAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build());
  }
103
  /** Shuts down the mini-cluster started in {@link #setUpBeforeClass()}. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    util.shutdownMiniCluster();
  }
108
109
110
111
112
113 @Test(timeout = 120000)
114 public void testSimpleLoad() throws Exception {
115 runTest("testSimpleLoad", BloomType.NONE,
116 new byte[][][] {
117 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
118 new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
119 }, 2);
120 }
121
122
123
124
125
126 @Test(timeout = 120000)
127 public void testRegionCrossingLoad() throws Exception {
128 runTest("testRegionCrossingLoad", BloomType.NONE,
129 new byte[][][] {
130 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
131 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
132 }, 2);
133 }
134
135
136
137
138 @Test(timeout = 60000)
139 public void testRegionCrossingRowBloom() throws Exception {
140 runTest("testRegionCrossingLoadRowBloom", BloomType.ROW,
141 new byte[][][] {
142 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
143 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
144 }, 2);
145 }
146
147
148
149
150 @Test(timeout = 120000)
151 public void testRegionCrossingRowColBloom() throws Exception {
152 runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL,
153 new byte[][][] {
154 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
155 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
156 }, 2);
157 }
158
159
160
161
162
163 @Test(timeout = 120000)
164 public void testSimpleHFileSplit() throws Exception {
165 runTest("testHFileSplit", BloomType.NONE,
166 new byte[][] {
167 Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
168 Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
169 },
170 new byte[][][] {
171 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("lll") },
172 new byte[][]{ Bytes.toBytes("mmm"), Bytes.toBytes("zzz") },
173 }, 2);
174 }
175
176
177
178
179
  /** Region-crossing HFile split with no bloom filter. */
  @Test(timeout = 80000)
  public void testRegionCrossingHFileSplit() throws Exception {
    testRegionCrossingHFileSplit(BloomType.NONE);
  }
184
185
186
187
188
  /** Region-crossing HFile split with a ROW bloom filter. */
  @Test(timeout = 120000)
  public void testRegionCrossingHFileSplitRowBloom() throws Exception {
    testRegionCrossingHFileSplit(BloomType.ROW);
  }
193
194
195
196
197
  /** Region-crossing HFile split with a ROWCOL bloom filter. */
  @Test(timeout = 120000)
  public void testRegionCrossingHFileSplitRowColBloom() throws Exception {
    testRegionCrossingHFileSplit(BloomType.ROWCOL);
  }
202
203 @Test
204 public void testSplitALot() throws Exception {
205 runTest("testSplitALot", BloomType.NONE,
206 new byte[][] {
207 Bytes.toBytes("aaaa"), Bytes.toBytes("bbb"),
208 Bytes.toBytes("ccc"), Bytes.toBytes("ddd"),
209 Bytes.toBytes("eee"), Bytes.toBytes("fff"),
210 Bytes.toBytes("ggg"), Bytes.toBytes("hhh"),
211 Bytes.toBytes("iii"), Bytes.toBytes("lll"),
212 Bytes.toBytes("mmm"), Bytes.toBytes("nnn"),
213 Bytes.toBytes("ooo"), Bytes.toBytes("ppp"),
214 Bytes.toBytes("qqq"), Bytes.toBytes("rrr"),
215 Bytes.toBytes("sss"), Bytes.toBytes("ttt"),
216 Bytes.toBytes("uuu"), Bytes.toBytes("vvv"),
217 Bytes.toBytes("zzz"),
218 },
219 new byte[][][] {
220 new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("zzz") },
221 }, 2);
222 }
223
224 private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception {
225 runTest("testHFileSplit" + bloomType + "Bloom", bloomType,
226 new byte[][] {
227 Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
228 Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
229 },
230 new byte[][][] {
231 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
232 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
233 }, 2);
234 }
235
236 private HTableDescriptor buildHTD(TableName tableName, BloomType bloomType) {
237 HTableDescriptor htd = new HTableDescriptor(tableName);
238 HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
239 familyDesc.setBloomFilterType(bloomType);
240 htd.addFamily(familyDesc);
241 return htd;
242 }
243
  /** Convenience overload: runs without pre-creating the table (null split keys). */
  private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges,
      int depth) throws Exception {
    runTest(testName, bloomType, null, hfileRanges, depth);
  }
248
  /**
   * Runs the load scenario against both a default-namespace table and a table in
   * the {@code bulkNS} namespace; a non-null {@code tableSplitKeys} makes the
   * table get pre-created.
   */
  private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys,
      byte[][][] hfileRanges, int depth) throws Exception {
    final byte[] TABLE_NAME = Bytes.toBytes("mytable_"+testName);
    final boolean preCreateTable = tableSplitKeys != null;

    // Default-namespace pass. NOTE(review): depth is hard-coded to 2 here while
    // the namespaced pass below uses the caller-supplied depth — looks
    // intentional, but worth confirming against the call sites.
    final TableName TABLE_WITHOUT_NS = TableName.valueOf(TABLE_NAME);
    runTest(testName, TABLE_WITHOUT_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, 2);

    // When the table is pre-created, also exercise the depth-3 (extra
    // region-level directory) layout under a distinct test name.
    if (preCreateTable) {
      runTest(testName + 2, TABLE_WITHOUT_NS, bloomType, true, tableSplitKeys, hfileRanges, 3);
    }

    // Namespaced pass with the caller-supplied depth.
    final TableName TABLE_WITH_NS = TableName.valueOf(Bytes.toBytes(NAMESPACE), TABLE_NAME);
    runTest(testName, TABLE_WITH_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, depth);
  }
273
  /** Builds the single-family descriptor for {@code tableName} and delegates. */
  private void runTest(String testName, TableName tableName, BloomType bloomType,
      boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, int depth)
      throws Exception {
    HTableDescriptor htd = buildHTD(tableName, bloomType);
    runTest(testName, htd, bloomType, preCreateTable, tableSplitKeys, hfileRanges, depth);
  }
280
281 private void runTest(String testName, HTableDescriptor htd, BloomType bloomType,
282 boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, int depth)
283 throws Exception {
284
285 for (boolean managed : new boolean[] { true, false }) {
286 Path baseDirectory = util.getDataTestDirOnTestFS(testName);
287 FileSystem fs = util.getTestFileSystem();
288 baseDirectory = baseDirectory.makeQualified(fs);
289 Path parentDir = baseDirectory;
290 if (depth == 3) {
291 parentDir = new Path(baseDirectory, "someRegion");
292 }
293 Path familyDir = new Path(parentDir, Bytes.toString(FAMILY));
294
295 int hfileIdx = 0;
296 for (byte[][] range : hfileRanges) {
297 byte[] from = range[0];
298 byte[] to = range[1];
299 HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
300 + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000);
301 }
302 int expectedRows = hfileIdx * 1000;
303
304 if (preCreateTable) {
305 util.getHBaseAdmin().createTable(htd, tableSplitKeys);
306 }
307
308 final TableName tableName = htd.getTableName();
309 if (!util.getHBaseAdmin().tableExists(tableName)) {
310 util.getHBaseAdmin().createTable(htd);
311 }
312 LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
313 loader.setDepth(depth);
314 if (managed) {
315 try (HTable table = new HTable(util.getConfiguration(), tableName)) {
316 loader.doBulkLoad(baseDirectory, table);
317 assertEquals(expectedRows, util.countRows(table));
318 }
319 } else {
320 try (Connection conn = ConnectionFactory.createConnection(util.getConfiguration());
321 HTable table = (HTable) conn.getTable(tableName)) {
322 loader.doBulkLoad(baseDirectory, table);
323 }
324 }
325
326
327 Path stagingBasePath = SecureBulkLoadUtil.getBaseStagingDir(util.getConfiguration());
328 if (fs.exists(stagingBasePath)) {
329 FileStatus[] files = fs.listStatus(stagingBasePath);
330 for (FileStatus file : files) {
331 assertTrue("Folder=" + file.getPath() + " is not cleaned up.",
332 file.getPath().getName() != "DONOTERASE");
333 }
334 }
335
336 util.deleteTable(tableName);
337 }
338 }
339
340
341
342
343
344
345
  /**
   * Verifies that cell tags written into an HFile survive a bulk load that must
   * split the file across several pre-created regions.
   *
   * NOTE(review): the method name carries a leading 'h' (htest...), a JUnit3-era
   * disabling convention; under JUnit4 the @Test annotation makes it run
   * regardless — confirm whether the prefix is intentional.
   */
  @Test(timeout = 60000)
  public void htestTagsSurviveBulkLoadSplit() throws Exception {
    Path dir = util.getDataTestDirOnTestFS(tn.getMethodName());
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs);
    Path familyDir = new Path(dir, Bytes.toString(FAMILY));

    // Pre-split so the single HFile below must be split during the load.
    byte [][] tableSplitKeys = new byte[][] {
        Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
        Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
    };

    // One tagged HFile spanning ddd..ooo (crosses several of the splits above).
    byte[] from = Bytes.toBytes("ddd");
    byte[] to = Bytes.toBytes("ooo");
    HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs,
        new Path(familyDir, tn.getMethodName()+"_hfile"),
        FAMILY, QUALIFIER, from, to, 1000);
    int expectedRows = 1000;

    TableName tableName = TableName.valueOf(tn.getMethodName());
    HTableDescriptor htd = buildHTD(tableName, BloomType.NONE);
    util.getHBaseAdmin().createTable(htd, tableSplitKeys);

    // Drive the loader through its command-line entry point.
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
    String [] args= {dir.toString(), tableName.toString()};
    loader.run(args);

    Table table = util.getConnection().getTable(tableName);
    try {
      assertEquals(expectedRows, util.countRows(table));
      // The actual assertion of interest: tags are still present after the load.
      HFileTestUtil.verifyTags(table);
    } finally {
      table.close();
    }

    util.deleteTable(tableName);
  }
384
385
386
387
388 @Test(timeout = 60000)
389 public void testNonexistentColumnFamilyLoad() throws Exception {
390 String testName = "testNonexistentColumnFamilyLoad";
391 byte[][][] hFileRanges = new byte[][][] {
392 new byte[][]{ Bytes.toBytes("aaa"), Bytes.toBytes("ccc") },
393 new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
394 };
395
396 final byte[] TABLE = Bytes.toBytes("mytable_"+testName);
397 HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));
398
399
400 HColumnDescriptor family =
401 new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)));
402 htd.addFamily(family);
403
404 try {
405 runTest(testName, htd, BloomType.NONE, true, SPLIT_KEYS, hFileRanges, 2);
406 assertTrue("Loading into table with non-existent family should have failed", false);
407 } catch (Exception e) {
408 assertTrue("IOException expected", e instanceof IOException);
409
410 String errMsg = e.getMessage();
411 assertTrue("Incorrect exception message, expected message: ["
412 + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY + "], current message: [" + errMsg + "]",
413 errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY));
414 }
415 }
416
  /** Non-HFile junk alongside real HFiles, with the table pre-created. */
  @Test(timeout = 120000)
  public void testNonHfileFolderWithUnmatchedFamilyName() throws Exception {
    testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true);
  }

  /** Non-HFile junk alongside real HFiles, with the loader creating the table. */
  @Test(timeout = 120000)
  public void testNonHfileFolder() throws Exception {
    testNonHfileFolder("testNonHfileFolder", false);
  }
426
427
428
429
430
431
  /**
   * Places a valid 500-row HFile next to non-HFile junk (a random data file in
   * the family dir, plus a "_logs" directory containing more junk) and verifies
   * the loader skips the junk and loads exactly the 500 real rows.
   */
  private void testNonHfileFolder(String tableName, boolean preCreateTable) throws Exception {
    Path dir = util.getDataTestDirOnTestFS(tableName);
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs);

    Path familyDir = new Path(dir, Bytes.toString(FAMILY));
    HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_0"),
        FAMILY, QUALIFIER, Bytes.toBytes("begin"), Bytes.toBytes("end"), 500);
    // Junk inside the family directory: must be ignored, not rejected.
    createRandomDataFile(fs, new Path(familyDir, "012356789"), 16 * 1024);

    // A directory that is not a family directory at all, with junk inside it.
    final String NON_FAMILY_FOLDER = "_logs";
    Path nonFamilyDir = new Path(dir, NON_FAMILY_FOLDER);
    fs.mkdirs(nonFamilyDir);
    fs.mkdirs(new Path(nonFamilyDir, "non-file"));
    createRandomDataFile(fs, new Path(nonFamilyDir, "012356789"), 16 * 1024);

    Table table = null;
    try {
      if (preCreateTable) {
        table = util.createTable(TableName.valueOf(tableName), FAMILY);
      } else {
        table = util.getConnection().getTable(TableName.valueOf(tableName));
      }

      final String[] args = {dir.toString(), tableName};
      new LoadIncrementalHFiles(util.getConfiguration()).run(args);
      assertEquals(500, util.countRows(table));
    } finally {
      if (table != null) {
        table.close();
      }
      fs.delete(dir, true);
    }
  }
466
467 private static void createRandomDataFile(FileSystem fs, Path path, int size)
468 throws IOException {
469 FSDataOutputStream stream = fs.create(path);
470 try {
471 byte[] data = new byte[1024];
472 for (int i = 0; i < data.length; ++i) {
473 data[i] = (byte)(i & 0xff);
474 }
475 while (size >= data.length) {
476 stream.write(data, 0, data.length);
477 size -= data.length;
478 }
479 if (size > 0) {
480 stream.write(data, 0, size);
481 }
482 } finally {
483 stream.close();
484 }
485 }
486
487 @Test(timeout = 120000)
488 public void testSplitStoreFile() throws IOException {
489 Path dir = util.getDataTestDirOnTestFS("testSplitHFile");
490 FileSystem fs = util.getTestFileSystem();
491 Path testIn = new Path(dir, "testhfile");
492 HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
493 HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER,
494 Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
495
496 Path bottomOut = new Path(dir, "bottom.out");
497 Path topOut = new Path(dir, "top.out");
498
499 LoadIncrementalHFiles.splitStoreFile(
500 util.getConfiguration(), testIn,
501 familyDesc, Bytes.toBytes("ggg"),
502 bottomOut,
503 topOut);
504
505 int rowCount = verifyHFile(bottomOut);
506 rowCount += verifyHFile(topOut);
507 assertEquals(1000, rowCount);
508 }
509
  /** Split an unencoded HFile for an unencoded family. */
  @Test
  public void testSplitStoreFileWithNoneToNone() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
  }

  /** Split a DIFF-encoded HFile for a DIFF-encoded family. */
  @Test
  public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
  }

  /** Split a DIFF-encoded HFile for an unencoded family. */
  @Test
  public void testSplitStoreFileWithEncodedToNone() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
  }

  /** Split an unencoded HFile for a DIFF-encoded family. */
  @Test
  public void testSplitStoreFileWithNoneToEncoded() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
  }
529
  /**
   * Writes a 1000-row HFile with {@code bulkloadEncoding}, splits it at "ggg"
   * for a family configured with {@code cfEncoding}, and checks the two halves
   * together still contain all rows — i.e. the split handles any combination of
   * source and target data-block encodings.
   */
  private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
      DataBlockEncoding cfEncoding) throws IOException {
    Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
    FileSystem fs = util.getTestFileSystem();
    Path testIn = new Path(dir, "testhfile");
    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
    familyDesc.setDataBlockEncoding(cfEncoding);
    HFileTestUtil.createHFileWithDataBlockEncoding(
        util.getConfiguration(), fs, testIn, bulkloadEncoding,
        FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);

    Path bottomOut = new Path(dir, "bottom.out");
    Path topOut = new Path(dir, "top.out");

    LoadIncrementalHFiles.splitStoreFile(
        util.getConfiguration(), testIn,
        familyDesc, Bytes.toBytes("ggg"),
        bottomOut,
        topOut);

    int rowCount = verifyHFile(bottomOut);
    rowCount += verifyHFile(topOut);
    assertEquals(1000, rowCount);
  }
554
  /**
   * Opens the HFile at {@code p} and returns the number of cells it contains,
   * asserting the count is positive.
   *
   * NOTE(review): the do/while counts one entry before the first next(), and
   * seekTo()'s return value is ignored — this assumes the file is non-empty; an
   * empty file would be reported as holding one row. Fine for these tests,
   * which always write at least 1000 cells.
   */
  private int verifyHFile(Path p) throws IOException {
    Configuration conf = util.getConfiguration();
    HFile.Reader reader = HFile.createReader(
        p.getFileSystem(conf), p, new CacheConfig(conf), conf);
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, false);
    scanner.seekTo();
    int count = 0;
    do {
      count++;
    } while (scanner.next());
    assertTrue(count > 0);
    reader.close();
    return count;
  }
570
571 private void addStartEndKeysForTest(TreeMap<byte[], Integer> map, byte[] first, byte[] last) {
572 Integer value = map.containsKey(first)?map.get(first):0;
573 map.put(first, value+1);
574
575 value = map.containsKey(last)?map.get(last):0;
576 map.put(last, value-1);
577 }
578
579 @Test(timeout = 120000)
580 public void testInferBoundaries() {
581 TreeMap<byte[], Integer> map = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
582
583
584
585
586
587
588
589
590
591
592
593 String first;
594 String last;
595
596 first = "a"; last = "e";
597 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
598
599 first = "r"; last = "s";
600 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
601
602 first = "o"; last = "p";
603 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
604
605 first = "g"; last = "k";
606 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
607
608 first = "v"; last = "x";
609 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
610
611 first = "c"; last = "i";
612 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
613
614 first = "m"; last = "q";
615 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
616
617 first = "s"; last = "t";
618 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
619
620 first = "u"; last = "w";
621 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
622
623 byte[][] keysArray = LoadIncrementalHFiles.inferBoundaries(map);
624 byte[][] compare = new byte[3][];
625 compare[0] = "m".getBytes();
626 compare[1] = "r".getBytes();
627 compare[2] = "u".getBytes();
628
629 assertEquals(keysArray.length, 3);
630
631 for (int row = 0; row<keysArray.length; row++){
632 assertArrayEquals(keysArray[row], compare[row]);
633 }
634 }
635
  /**
   * Writing MAX_FILES_PER_REGION_PER_FAMILY + 1 HFiles for one family must make
   * the loader refuse the load with a "Trying to load more than ..." IOException.
   * (The "TooMay" typo is part of the established test and table name.)
   */
  @Test(timeout = 60000)
  public void testLoadTooMayHFiles() throws Exception {
    Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles");
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs);
    Path familyDir = new Path(dir, Bytes.toString(FAMILY));

    byte[] from = Bytes.toBytes("begin");
    byte[] to = Bytes.toBytes("end");
    // <= produces exactly one file over the configured cap.
    for (int i = 0; i <= MAX_FILES_PER_REGION_PER_FAMILY; i++) {
      HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
          + i), FAMILY, QUALIFIER, from, to, 1000);
    }

    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
    String [] args= {dir.toString(), "mytable_testLoadTooMayHFiles"};
    try {
      loader.run(args);
      fail("Bulk loading too many files should fail");
    } catch (IOException ie) {
      assertTrue(ie.getMessage().contains("Trying to load more than "
          + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles"));
    }
  }
660
661 @Test(expected = TableNotFoundException.class)
662 public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
663 Configuration conf = util.getConfiguration();
664 conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
665 LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
666 String[] args = { "directory", "nonExistingTable" };
667 loader.run(args);
668 }
669
  /**
   * A column family whose name starts with an underscore ("_cf") must still be
   * recognized as a family directory and have its 1000 rows bulk loaded.
   */
  @Test(timeout = 120000)
  public void testTableWithCFNameStartWithUnderScore() throws Exception {
    Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore");
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    String family = "_cf";
    Path familyDir = new Path(dir, family);

    byte[] from = Bytes.toBytes("begin");
    byte[] to = Bytes.toBytes("end");
    Configuration conf = util.getConfiguration();
    String tableName = "mytable_cfNameStartWithUnderScore";
    Table table = util.createTable(TableName.valueOf(tableName), family);
    HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family),
        QUALIFIER, from, to, 1000);

    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    String[] args = { dir.toString(), tableName };
    try {
      loader.run(args);
      assertEquals(1000, util.countRows(table));
    } finally {
      if (null != table) {
        table.close();
      }
    }
  }
697 }
698