View Javadoc

1   /**
2    * Licensed to the Apache Software Foundation (ASF) under one
3    * or more contributor license agreements.  See the NOTICE file
4    * distributed with this work for additional information
5    * regarding copyright ownership.  The ASF licenses this file
6    * to you under the Apache License, Version 2.0 (the
7    * "License"); you may not use this file except in compliance
8    * with the License.  You may obtain a copy of the License at
9    *
10   *     http://www.apache.org/licenses/LICENSE-2.0
11   *
12   * Unless required by applicable law or agreed to in writing, software
13   * distributed under the License is distributed on an "AS IS" BASIS,
14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15   * See the License for the specific language governing permissions and
16   * limitations under the License.
17   */
18  package org.apache.hadoop.hbase.mapreduce;
19  
20  import static org.apache.hadoop.hbase.HConstants.RPC_CODEC_CONF_KEY;
21  import static org.apache.hadoop.hbase.ipc.RpcClient.DEFAULT_CODEC_CLASS;
22  import static org.junit.Assert.assertEquals;
23  import static org.junit.Assert.assertFalse;
24  import static org.junit.Assert.assertTrue;
25  import static org.junit.Assert.fail;
26  import static org.mockito.Matchers.any;
27  import static org.mockito.Mockito.doAnswer;
28  import static org.mockito.Mockito.mock;
29  import static org.mockito.Mockito.when;
30  
31  import java.io.ByteArrayOutputStream;
32  import java.io.File;
33  import java.io.IOException;
34  import java.io.PrintStream;
35  import java.net.URL;
36  import java.util.ArrayList;
37  import java.util.Arrays;
38  import java.util.List;
39  
40  import org.apache.commons.logging.Log;
41  import org.apache.commons.logging.LogFactory;
42  import org.apache.hadoop.conf.Configuration;
43  import org.apache.hadoop.fs.FileSystem;
44  import org.apache.hadoop.fs.Path;
45  import org.apache.hadoop.hbase.CellScanner;
46  import org.apache.hadoop.hbase.HConstants;
47  import org.apache.hadoop.hbase.Cell;
48  import org.apache.hadoop.hbase.CellUtil;
49  import org.apache.hadoop.hbase.CategoryBasedTimeout;
50  import org.apache.hadoop.hbase.HBaseTestingUtility;
51  import org.apache.hadoop.hbase.HColumnDescriptor;
52  import org.apache.hadoop.hbase.HRegionInfo;
53  import org.apache.hadoop.hbase.HTableDescriptor;
54  import org.apache.hadoop.hbase.KeyValue;
55  import org.apache.hadoop.hbase.Tag;
56  import org.apache.hadoop.hbase.TagRewriteCell;
57  import org.apache.hadoop.hbase.client.Connection;
58  import org.apache.hadoop.hbase.client.ConnectionFactory;
59  import org.apache.hadoop.hbase.client.Mutation;
60  import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
61  import org.apache.hadoop.hbase.coprocessor.ObserverContext;
62  import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
63  import org.apache.hadoop.hbase.regionserver.HRegion;
64  import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
65  import org.apache.hadoop.hbase.regionserver.RegionScanner;
66  import org.apache.hadoop.hbase.testclassification.MediumTests;
67  import org.apache.hadoop.hbase.TableName;
68  import org.apache.hadoop.hbase.client.Delete;
69  import org.apache.hadoop.hbase.client.Durability;
70  import org.apache.hadoop.hbase.client.Get;
71  import org.apache.hadoop.hbase.client.HTable;
72  import org.apache.hadoop.hbase.client.Put;
73  import org.apache.hadoop.hbase.client.Result;
74  import org.apache.hadoop.hbase.client.ResultScanner;
75  import org.apache.hadoop.hbase.client.Scan;
76  import org.apache.hadoop.hbase.client.Table;
77  import org.apache.hadoop.hbase.filter.Filter;
78  import org.apache.hadoop.hbase.filter.FilterBase;
79  import org.apache.hadoop.hbase.filter.PrefixFilter;
80  import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
81  import org.apache.hadoop.hbase.mapreduce.Import.KeyValueImporter;
82  import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
83  import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
84  import org.apache.hadoop.hbase.wal.WAL;
85  import org.apache.hadoop.hbase.wal.WALKey;
86  import org.apache.hadoop.hbase.util.Bytes;
87  import org.apache.hadoop.hbase.util.LauncherSecurityManager;
88  import org.apache.hadoop.mapreduce.Job;
89  import org.apache.hadoop.mapreduce.Mapper.Context;
90  import org.apache.hadoop.util.GenericOptionsParser;
91  import org.junit.After;
92  import org.junit.AfterClass;
93  import org.junit.Assert;
94  import org.junit.Before;
95  import org.junit.BeforeClass;
96  import org.junit.Rule;
97  import org.junit.Test;
98  import org.junit.experimental.categories.Category;
99  import org.junit.rules.TestRule;
100 import org.mockito.invocation.InvocationOnMock;
101 import org.mockito.stubbing.Answer;
102 
/**
 * Tests the table import and table export MR job functionality
 */
@Category(MediumTests.class)
public class TestImportExport {
  private static final Log LOG = LogFactory.getLog(TestImportExport.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // Row keys use binary-escape syntax ("\x32" is the byte for '2') so the
  // export/import round trip is exercised with keys built via toBytesBinary.
  private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
  private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");
  private static final byte[] ROW3 = Bytes.toBytesBinary("\\x32row3");
  private static final String FAMILYA_STRING = "a";
  private static final String FAMILYB_STRING = "b";
  private static final byte[] FAMILYA = Bytes.toBytes(FAMILYA_STRING);
  private static final byte[] FAMILYB = Bytes.toBytes(FAMILYB_STRING);
  private static final byte[] QUAL = Bytes.toBytes("q");
  // Relative export output directory; FQ_OUTPUT_DIR is its fully-qualified
  // form, resolved against the test filesystem in beforeClass().
  private static final String OUTPUT_DIR = "outputdir";
  private static String FQ_OUTPUT_DIR;
  private static final String EXPORT_BATCH_SIZE = "100";

  // Base timestamp for all cell versions written by the tests.
  private static long now = System.currentTimeMillis();
  public static final byte TEST_TAG_TYPE =  (byte) (65);
  public static final String TEST_ATTR = "source_op";
  public static final String TEST_TAG = "test_tag";

  // Per-test timeout derived from the test category (MediumTests).
  @Rule
  public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
          withLookingForStuckThread(true).build();
130 
  /**
   * Starts the mini cluster once for the whole class and resolves the
   * fully-qualified export output directory.
   */
  @BeforeClass
  public static void beforeClass() throws Exception {
    // Up the handlers; this test needs more than usual.
    UTIL.getConfiguration().setBoolean(HBaseTestingUtility.USE_LOCAL_FILESYSTEM, true);
    UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
    // Presumably runs the MR jobs in-process instead of a MiniMRCluster — see
    // HBaseTestingUtility.setJobWithoutMRCluster for the exact semantics.
    UTIL.setJobWithoutMRCluster();
    UTIL.startMiniCluster();
    // Qualify the relative output dir against the configured filesystem so the
    // export/import jobs and the cleanup() method agree on its location.
    FQ_OUTPUT_DIR =
      new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString();
  }
141 
  /** Shuts down the mini cluster started in {@link #beforeClass()}. */
  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }
146 
147   @Before
148   @After
149   public void cleanup() throws Exception {
150     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
151     fs.delete(new Path(OUTPUT_DIR), true);
152   }
153 
154   /**
155    * Runs an export job with the specified command line args
156    * @param args
157    * @return true if job completed successfully
158    * @throws IOException
159    * @throws InterruptedException
160    * @throws ClassNotFoundException
161    */
162   boolean runExport(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
163     // need to make a copy of the configuration because to make sure different temp dirs are used.
164     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
165     Configuration conf = opts.getConfiguration();
166     args = opts.getRemainingArgs();
167     Job job = Export.createSubmittableJob(conf, args);
168     job.waitForCompletion(false);
169     return job.isSuccessful();
170   }
171 
172   /**
173    * Runs an import job with the specified command line args
174    * @param args
175    * @return true if job completed successfully
176    * @throws IOException
177    * @throws InterruptedException
178    * @throws ClassNotFoundException
179    */
180   boolean runImport(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
181     // need to make a copy of the configuration because to make sure different temp dirs are used.
182     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
183     Configuration conf = opts.getConfiguration();
184     args = opts.getRemainingArgs();
185     Job job = Import.createSubmittableJob(conf, args);
186     job.waitForCompletion(false);
187     return job.isSuccessful();
188   }
189 
190   /**
191    * Test simple replication case with column mapping
192    * @throws Exception
193    */
194   @Test
195   public void testSimpleCase() throws Exception {
196     String EXPORT_TABLE = "exportSimpleCase";
197     try (Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);) {
198       Put p = new Put(ROW1);
199       p.add(FAMILYA, QUAL, now, QUAL);
200       p.add(FAMILYA, QUAL, now+1, QUAL);
201       p.add(FAMILYA, QUAL, now+2, QUAL);
202       t.put(p);
203       p = new Put(ROW2);
204       p.add(FAMILYA, QUAL, now, QUAL);
205       p.add(FAMILYA, QUAL, now+1, QUAL);
206       p.add(FAMILYA, QUAL, now+2, QUAL);
207       t.put(p);
208       p = new Put(ROW3);
209       p.add(FAMILYA, QUAL, now, QUAL);
210       p.add(FAMILYA, QUAL, now + 1, QUAL);
211       p.add(FAMILYA, QUAL, now + 2, QUAL);
212       t.put(p);
213     }
214 
215     String[] args = new String[] {
216         // Only export row1 & row2.
217         "-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1",
218         "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3",
219         EXPORT_TABLE,
220         FQ_OUTPUT_DIR,
221         "1000", // max number of key versions per key to export
222     };
223     assertTrue(runExport(args));
224 
225     String IMPORT_TABLE = "importTableSimpleCase";
226     try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);) {
227       args = new String[] {
228         "-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING,
229         IMPORT_TABLE,
230         FQ_OUTPUT_DIR
231       };
232       assertTrue(runImport(args));
233 
234       Get g = new Get(ROW1);
235       g.setMaxVersions();
236       Result r = t.get(g);
237       assertEquals(3, r.size());
238       g = new Get(ROW2);
239       g.setMaxVersions();
240       r = t.get(g);
241       assertEquals(3, r.size());
242       g = new Get(ROW3);
243       r = t.get(g);
244       assertEquals(0, r.size());
245     }
246   }
247 
248   /**
249    * Test export hbase:meta table
250    *
251    * @throws Exception
252    */
253   @Test
254   public void testMetaExport() throws Exception {
255     String EXPORT_TABLE = TableName.META_TABLE_NAME.getNameAsString();
256     String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1", "0", "0" };
257     assertTrue(runExport(args));
258   }
259 
260   /**
261    * Test import data from 0.94 exported file
262    * @throws Exception
263    */
264   @Test
265   public void testImport94Table() throws Exception {
266     final String name = "exportedTableIn94Format";
267     URL url = TestImportExport.class.getResource(name);
268     File f = new File(url.toURI());
269     if (!f.exists()) {
270       LOG.warn("FAILED TO FIND " + f + "; skipping out on test");
271       return;
272     }
273     assertTrue(f.exists());
274     LOG.info("FILE=" + f);
275     Path importPath = new Path(f.toURI());
276     FileSystem fs = FileSystem.get(UTIL.getConfiguration());
277     fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + name));
278     String IMPORT_TABLE = name;
279     try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);) {
280       String[] args = new String[] {
281           "-Dhbase.import.version=0.94" ,
282           IMPORT_TABLE, FQ_OUTPUT_DIR
283       };
284       assertTrue(runImport(args));
285       /* exportedTableIn94Format contains 5 rows
286       ROW         COLUMN+CELL
287       r1          column=f1:c1, timestamp=1383766761171, value=val1
288       r2          column=f1:c1, timestamp=1383766771642, value=val2
289       r3          column=f1:c1, timestamp=1383766777615, value=val3
290       r4          column=f1:c1, timestamp=1383766785146, value=val4
291       r5          column=f1:c1, timestamp=1383766791506, value=val5
292       */
293      assertEquals(5, UTIL.countRows(t));
294     }
295   }
296 
297   /**
298    * Test export scanner batching
299    */
300    @Test
301    public void testExportScannerBatching() throws Exception {
302     String BATCH_TABLE = "exportWithBatch";
303     HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(BATCH_TABLE));
304     desc.addFamily(new HColumnDescriptor(FAMILYA)
305         .setMaxVersions(1)
306     );
307     UTIL.getHBaseAdmin().createTable(desc);
308     try (Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());) {
309       Put p = new Put(ROW1);
310       p.add(FAMILYA, QUAL, now, QUAL);
311       p.add(FAMILYA, QUAL, now+1, QUAL);
312       p.add(FAMILYA, QUAL, now+2, QUAL);
313       p.add(FAMILYA, QUAL, now+3, QUAL);
314       p.add(FAMILYA, QUAL, now+4, QUAL);
315       t.put(p);
316 
317       String[] args = new String[] {
318         "-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE,  // added scanner batching arg.
319         BATCH_TABLE,
320         FQ_OUTPUT_DIR
321       };
322       assertTrue(runExport(args));
323 
324       FileSystem fs = FileSystem.get(UTIL.getConfiguration());
325       fs.delete(new Path(FQ_OUTPUT_DIR), true);
326     }
327   }
328 
329   @Test
330   public void testWithDeletes() throws Exception {
331     String IMPORT_TABLE = "importWithDeletes";
332     String EXPORT_TABLE = "exportWithDeletes";
333     HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
334     desc.addFamily(new HColumnDescriptor(FAMILYA)
335         .setMaxVersions(5)
336         .setKeepDeletedCells(true)
337     );
338     UTIL.getHBaseAdmin().createTable(desc);
339     try (Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());) {
340       Put p = new Put(ROW1);
341       p.add(FAMILYA, QUAL, now, QUAL);
342       p.add(FAMILYA, QUAL, now+1, QUAL);
343       p.add(FAMILYA, QUAL, now+2, QUAL);
344       p.add(FAMILYA, QUAL, now+3, QUAL);
345       p.add(FAMILYA, QUAL, now+4, QUAL);
346       t.put(p);
347 
348       Delete d = new Delete(ROW1, now+3);
349       t.delete(d);
350       d = new Delete(ROW1);
351       d.deleteColumns(FAMILYA, QUAL, now+2);
352       t.delete(d);
353 
354       String[] args = new String[] {
355         "-D" + Export.RAW_SCAN + "=true",
356         EXPORT_TABLE,
357         FQ_OUTPUT_DIR,
358         "1000", // max number of key versions per key to export
359       };
360       assertTrue(runExport(args));
361 
362       desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
363       desc.addFamily(new HColumnDescriptor(FAMILYA)
364         .setMaxVersions(5)
365         .setKeepDeletedCells(true)
366       );
367     }
368     UTIL.getHBaseAdmin().createTable(desc);
369     try (Table t = new HTable(UTIL.getConfiguration(), desc.getTableName());) {
370        String [] args = new String[] {
371         IMPORT_TABLE,
372         FQ_OUTPUT_DIR
373        };
374        assertTrue(runImport(args));
375 
376        Scan s = new Scan();
377        s.setMaxVersions();
378        s.setRaw(true);
379        ResultScanner scanner = t.getScanner(s);
380        Result r = scanner.next();
381        Cell[] res = r.rawCells();
382        assertTrue(CellUtil.isDeleteFamily(res[0]));
383        assertEquals(now+4, res[1].getTimestamp());
384        assertEquals(now+3, res[2].getTimestamp());
385        assertTrue(CellUtil.isDelete(res[3]));
386        assertEquals(now+2, res[4].getTimestamp());
387       assertEquals(now+1, res[5].getTimestamp());
388       assertEquals(now, res[6].getTimestamp());
389     }
390   }
391 
392   @Test
393   public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
394     String EXPORT_TABLE = "exportWithMultipleDeleteFamilyMarkersOfSameRowSameFamily";
395     HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
396     desc.addFamily(new HColumnDescriptor(FAMILYA)
397         .setMaxVersions(5)
398         .setKeepDeletedCells(true)
399     );
400     UTIL.getHBaseAdmin().createTable(desc);
401     HTable exportT = new HTable(UTIL.getConfiguration(), EXPORT_TABLE);
402       //Add first version of QUAL
403       Put p = new Put(ROW1);
404     p.add(FAMILYA, QUAL, now, QUAL);
405       exportT.put(p);
406 
407       //Add Delete family marker
408       Delete d = new Delete(ROW1, now+3);
409       exportT.delete(d);
410 
411     //Add second version of QUAL
412     p = new Put(ROW1);
413     p.add(FAMILYA, QUAL, now + 5, "s".getBytes());
414     exportT.put(p);
415 
416     //Add second Delete family marker
417     d = new Delete(ROW1, now+7);
418     exportT.delete(d);
419 
420 
421     String[] args = new String[] {
422         "-D" + Export.RAW_SCAN + "=true",
423         EXPORT_TABLE,
424         FQ_OUTPUT_DIR,
425         "1000", // max number of key versions per key to export
426     };
427     assertTrue(runExport(args));
428 
429     String IMPORT_TABLE = "importWithMultipleDeleteFamilyMarkersOfSameRowSameFamily";
430     desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
431     desc.addFamily(new HColumnDescriptor(FAMILYA)
432         .setMaxVersions(5)
433         .setKeepDeletedCells(true)
434     );
435     UTIL.getHBaseAdmin().createTable(desc);
436 
437     HTable importT = new HTable(UTIL.getConfiguration(), IMPORT_TABLE);
438     args = new String[] {
439         IMPORT_TABLE,
440         FQ_OUTPUT_DIR
441     };
442     assertTrue(runImport(args));
443 
444     Scan s = new Scan();
445     s.setMaxVersions();
446     s.setRaw(true);
447 
448     ResultScanner importedTScanner = importT.getScanner(s);
449     Result importedTResult = importedTScanner.next();
450 
451     ResultScanner exportedTScanner = exportT.getScanner(s);
452     Result  exportedTResult =  exportedTScanner.next();
453     try {
454       Result.compareResults(exportedTResult, importedTResult);
455     } catch (Exception e) {
456       fail("Original and imported tables data comparision failed with error:"+e.getMessage());
457     } finally {
458       exportT.close();
459       importT.close();
460     }
461   }
462 
463   /**
464    * Create a simple table, run an Export Job on it, Import with filtering on,  verify counts,
465    * attempt with invalid values.
466    */
467   @Test
468   public void testWithFilter() throws Exception {
469     // Create simple table to export
470     String EXPORT_TABLE = "exportSimpleCase_ImportWithFilter";
471     HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(EXPORT_TABLE));
472     desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
473     UTIL.getHBaseAdmin().createTable(desc);
474     Table exportTable = new HTable(UTIL.getConfiguration(), desc.getTableName());
475 
476     Put p1 = new Put(ROW1);
477     p1.add(FAMILYA, QUAL, now, QUAL);
478     p1.add(FAMILYA, QUAL, now + 1, QUAL);
479     p1.add(FAMILYA, QUAL, now + 2, QUAL);
480     p1.add(FAMILYA, QUAL, now + 3, QUAL);
481     p1.add(FAMILYA, QUAL, now + 4, QUAL);
482 
483     // Having another row would actually test the filter.
484     Put p2 = new Put(ROW2);
485     p2.add(FAMILYA, QUAL, now, QUAL);
486 
487     exportTable.put(Arrays.asList(p1, p2));
488 
489     // Export the simple table
490     String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1000" };
491     assertTrue(runExport(args));
492 
493     // Import to a new table
494     String IMPORT_TABLE = "importWithFilter";
495     desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
496     desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
497     UTIL.getHBaseAdmin().createTable(desc);
498 
499     Table importTable = new HTable(UTIL.getConfiguration(), desc.getTableName());
500     args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
501         "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE,
502         FQ_OUTPUT_DIR,
503         "1000" };
504     assertTrue(runImport(args));
505 
506     // get the count of the source table for that time range
507     PrefixFilter filter = new PrefixFilter(ROW1);
508     int count = getCount(exportTable, filter);
509 
510     Assert.assertEquals("Unexpected row count between export and import tables", count,
511       getCount(importTable, null));
512 
513     // and then test that a broken command doesn't bork everything - easier here because we don't
514     // need to re-run the export job
515 
516     args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
517         "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", EXPORT_TABLE,
518         FQ_OUTPUT_DIR, "1000" };
519     assertFalse(runImport(args));
520 
521     // cleanup
522     exportTable.close();
523     importTable.close();
524   }
525 
526   /**
527    * Count the number of keyvalues in the specified table for the given timerange
528    * @param start
529    * @param end
530    * @param table
531    * @return
532    * @throws IOException
533    */
534   private int getCount(Table table, Filter filter) throws IOException {
535     Scan scan = new Scan();
536     scan.setFilter(filter);
537     ResultScanner results = table.getScanner(scan);
538     int count = 0;
539     for (Result res : results) {
540       count += res.size();
541     }
542     results.close();
543     return count;
544   }
545 
546   /**
547    * test main method. Import should print help and call System.exit
548    */
549   @Test
550   public void testImportMain() throws Exception {
551     PrintStream oldPrintStream = System.err;
552     SecurityManager SECURITY_MANAGER = System.getSecurityManager();
553     LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
554     System.setSecurityManager(newSecurityManager);
555     ByteArrayOutputStream data = new ByteArrayOutputStream();
556     String[] args = {};
557     System.setErr(new PrintStream(data));
558     try {
559       System.setErr(new PrintStream(data));
560       Import.main(args);
561       fail("should be SecurityException");
562     } catch (SecurityException e) {
563       assertEquals(-1, newSecurityManager.getExitCode());
564       assertTrue(data.toString().contains("Wrong number of arguments:"));
565       assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
566       assertTrue(data.toString().contains("-Dimport.filter.class=<name of filter class>"));
567       assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
568       assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false"));
569     } finally {
570       System.setErr(oldPrintStream);
571       System.setSecurityManager(SECURITY_MANAGER);
572     }
573   }
574 
575   /**
576    * test main method. Export should print help and call System.exit
577    */
578   @Test
579   public void testExportMain() throws Exception {
580     PrintStream oldPrintStream = System.err;
581     SecurityManager SECURITY_MANAGER = System.getSecurityManager();
582     LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
583     System.setSecurityManager(newSecurityManager);
584     ByteArrayOutputStream data = new ByteArrayOutputStream();
585     String[] args = {};
586     System.setErr(new PrintStream(data));
587     try {
588       System.setErr(new PrintStream(data));
589       Export.main(args);
590       fail("should be SecurityException");
591     } catch (SecurityException e) {
592       assertEquals(-1, newSecurityManager.getExitCode());
593       assertTrue(data.toString().contains("Wrong number of arguments:"));
594       assertTrue(data.toString().contains(
595               "Usage: Export [-D <property=value>]* <tablename> <outputdir> [<versions> " +
596               "[<starttime> [<endtime>]] [^[regex pattern] or [Prefix] to filter]]"));
597       assertTrue(data.toString().contains("-D hbase.mapreduce.scan.column.family=<familyName>"));
598       assertTrue(data.toString().contains("-D hbase.mapreduce.include.deleted.rows=true"));
599       assertTrue(data.toString().contains("-Dhbase.client.scanner.caching=100"));
600       assertTrue(data.toString().contains("-Dmapreduce.map.speculative=false"));
601       assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false"));
602       assertTrue(data.toString().contains("-Dhbase.export.scanner.batch=10"));
603     } finally {
604       System.setErr(oldPrintStream);
605       System.setSecurityManager(SECURITY_MANAGER);
606     }
607   }
608 
  /**
   * Test the map method of Import.KeyValueImporter: each raw cell of the input
   * Result is expected to be written to the context as a KeyValue keyed by the
   * incoming row key.
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Test
  public void testKeyValueImporter() throws Exception {
    KeyValueImporter importer = new KeyValueImporter();
    Configuration configuration = new Configuration();
    Context ctx = mock(Context.class);
    when(ctx.getConfiguration()).thenReturn(configuration);

    // Intercept context.write() and assert on each emitted key/value pair.
    doAnswer(new Answer<Void>() {

      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
        KeyValue key = (KeyValue) invocation.getArguments()[1];
        // The output key must be the map-input key, the KeyValue row unchanged.
        assertEquals("Key", Bytes.toString(writer.get()));
        assertEquals("row", Bytes.toString(key.getRow()));
        return null;
      }
    }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class));

    importer.setup(ctx);
    Result value = mock(Result.class);
    // Two cells on the same row; both go through the mapper and hit the
    // Answer above via ctx.write().
    KeyValue[] keys = {
        new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
            Bytes.toBytes("value")),
        new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
            Bytes.toBytes("value1")) };
    when(value.rawCells()).thenReturn(keys);
    importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx);

  }
643 
644   /**
645    * Test addFilterAndArguments method of Import This method set couple
646    * parameters into Configuration
647    */
648   @Test
649   public void testAddFilterAndArguments() throws IOException {
650     Configuration configuration = new Configuration();
651 
652     List<String> args = new ArrayList<String>();
653     args.add("param1");
654     args.add("param2");
655 
656     Import.addFilterAndArguments(configuration, FilterBase.class, args);
657     assertEquals("org.apache.hadoop.hbase.filter.FilterBase",
658         configuration.get(Import.FILTER_CLASS_CONF_KEY));
659     assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
660   }
661 
662   @Test
663   public void testDurability() throws IOException, InterruptedException, ClassNotFoundException {
664     // Create an export table.
665     String exportTableName = "exporttestDurability";
666     try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) {
667       // Insert some data
668       Put put = new Put(ROW1);
669       put.add(FAMILYA, QUAL, now, QUAL);
670       put.add(FAMILYA, QUAL, now + 1, QUAL);
671       put.add(FAMILYA, QUAL, now + 2, QUAL);
672       exportTable.put(put);
673 
674       put = new Put(ROW2);
675       put.add(FAMILYA, QUAL, now, QUAL);
676       put.add(FAMILYA, QUAL, now + 1, QUAL);
677       put.add(FAMILYA, QUAL, now + 2, QUAL);
678       exportTable.put(put);
679     }
680 
681     // Run the export
682     String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000"};
683     assertTrue(runExport(args));
684 
685     // Create the table for import
686     String importTableName = "importTestDurability1";
687     WAL wal = null;
688     HRegionInfo region = null;
689     TableWALActionListener walListener = null;
690     try (Table importTable =
691       UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);) {
692 
693       // Register the wal listener for the import table
694       walListener = new TableWALActionListener(importTableName);
695       region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
696           .getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
697       wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
698       wal.registerWALActionsListener(walListener);
699 
700       // Run the import with SKIP_WAL
701       args =
702           new String[] { "-D" + Import.WAL_DURABILITY + "=" + Durability.SKIP_WAL.name(),
703             importTableName, FQ_OUTPUT_DIR };
704       assertTrue(runImport(args));
705       //Assert that the wal is not visisted
706       assertTrue(!walListener.isWALVisited());
707       //Ensure that the count is 2 (only one version of key value is obtained)
708       assertTrue(getCount(importTable, null) == 2);
709 
710       // Run the import with the default durability option
711     }
712     importTableName = "importTestDurability2";
713     try (Table importTable =
714         UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);) {
715       region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
716         .getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
717       wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
718       walListener = new TableWALActionListener(importTableName);
719       wal.registerWALActionsListener(walListener);
720       args = new String[] { importTableName, FQ_OUTPUT_DIR };
721       assertTrue(runImport(args));
722       //Assert that the wal is visisted
723       assertTrue(walListener.isWALVisited());
724       //Ensure that the count is 2 (only one version of key value is obtained)
725       assertTrue(getCount(importTable, null) == 2);
726     }
727   }
728 
729   /**
730    * This listens to the {@link #visitLogEntryBeforeWrite(HTableDescriptor, WALKey, WALEdit)} to
731    * identify that an entry is written to the Write Ahead Log for the given table.
732    */
733   private static class TableWALActionListener extends WALActionsListener.Base {
734 
735     private String tableName;
736     private boolean isVisited = false;
737 
738     public TableWALActionListener(String tableName) {
739       this.tableName = tableName;
740     }
741 
742     @Override
743     public void visitLogEntryBeforeWrite(HTableDescriptor htd, WALKey logKey, WALEdit logEdit) {
744       if (tableName.equalsIgnoreCase(htd.getNameAsString())) {
745         isVisited = true;
746       }
747     }
748 
749     public boolean isWALVisited() {
750       return isVisited;
751     }
752   }
753 
754   /**
755    *  Add cell tags to delete mutations, run export and import tool and
756    *  verify that tags are present in import table also.
757    * @throws Throwable throws Throwable.
758    */
759   @Test
760   public void testTagsAddition() throws Throwable {
761     final TableName exportTable = TableName.valueOf("exportWithTestTagsAddition");
762     HTableDescriptor desc = new HTableDescriptor(exportTable)
763       .addCoprocessor(MetadataController.class.getName());
764     desc.addFamily(new HColumnDescriptor(FAMILYA)
765         .setMaxVersions(5)
766         .setKeepDeletedCells(true));
767 
768     UTIL.getHBaseAdmin().createTable(desc);
769     Table exportT = UTIL.getConnection().getTable(exportTable);
770 
771     //Add first version of QUAL
772     Put p = new Put(ROW1);
773     p.addColumn(FAMILYA, QUAL, now, QUAL);
774     exportT.put(p);
775 
776     //Add Delete family marker
777     Delete d = new Delete(ROW1, now+3);
778     // Add test attribute to delete mutation.
779     d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
780     exportT.delete(d);
781 
782     // Run export tool with KeyValueCodecWithTags as Codec. This will ensure that export tool
783     // will use KeyValueCodecWithTags.
784     String[] args = new String[] {
785       "-D" + Export.RAW_SCAN + "=true",
786       // This will make sure that codec will encode and decode tags in rpc call.
787       "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
788       exportTable.getNameAsString(),
789       FQ_OUTPUT_DIR,
790       "1000", // max number of key versions per key to export
791     };
792     assertTrue(runExport(args));
793     // Assert tag exists in exportTable
794     checkWhetherTagExists(exportTable, true);
795 
796     // Create an import table with MetadataController.
797     final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
798     HTableDescriptor importTableDesc = new HTableDescriptor(importTable)
799       .addCoprocessor(MetadataController.class.getName());
800     importTableDesc.addFamily(new HColumnDescriptor(FAMILYA)
801       .setMaxVersions(5)
802       .setKeepDeletedCells(true));
803     UTIL.getHBaseAdmin().createTable(importTableDesc);
804 
805     // Run import tool.
806     args = new String[] {
807       // This will make sure that codec will encode and decode tags in rpc call.
808       "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
809       importTable.getNameAsString(),
810       FQ_OUTPUT_DIR
811     };
812     assertTrue(runImport(args));
813     // Make sure that tags exists in imported table.
814     checkWhetherTagExists(importTable, true);
815   }
816 
817   private void checkWhetherTagExists(TableName table, boolean tagExists) throws IOException {
818     List<Cell> values = new ArrayList<>();
819     for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
820       Scan scan = new Scan();
821       // Make sure to set rawScan to true so that we will get Delete Markers.
822       scan.setRaw(true);
823       scan.setMaxVersions();
824       scan.withStartRow(ROW1);
825       // Need to use RegionScanner instead of table#getScanner since the latter will
826       // not return tags since it will go through rpc layer and remove tags intentionally.
827       RegionScanner scanner = region.getScanner(scan);
828       scanner.next(values);
829       if (!values.isEmpty()) {
830         break;
831       }
832     }
833     boolean deleteFound = false;
834     for (Cell cell: values) {
835       if (CellUtil.isDelete(cell)) {
836         deleteFound = true;
837         List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
838           cell.getTagsLength());
839         // If tagExists flag is true then validate whether tag contents are as expected.
840         if (tagExists) {
841           Assert.assertEquals(1, tags.size());
842           for (Tag tag : tags) {
843             Assert.assertEquals(TEST_TAG, Bytes.toStringBinary(tag.getValue()));
844           }
845         } else {
846           // If tagExists flag is disabled then check for 0 size tags.
847           assertEquals(0, tags.size());
848         }
849       }
850     }
851     Assert.assertTrue(deleteFound);
852   }
853 
854   /*
855     This co-proc will add a cell tag to delete mutation.
856    */
857   public static  class MetadataController
858     extends BaseRegionObserver /*implements CoprocessorService*/ {
859     @Override
860     public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
861             MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
862       if (c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) {
863         return;
864       }
865 
866       for (int i = 0; i < miniBatchOp.size(); i++) {
867         Mutation m = miniBatchOp.getOperation(i);
868         if (!(m instanceof Delete)) {
869           continue;
870         }
871         byte[] sourceOpAttr = m.getAttribute(TEST_ATTR);
872         if (sourceOpAttr == null) {
873           continue;
874         }
875         Tag sourceOpTag = new Tag(TEST_TAG_TYPE, sourceOpAttr);
876         List<Cell> updatedCells = new ArrayList<>();
877         for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance(); ) {
878           Cell cell = cellScanner.current();
879           List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
880             cell.getTagsLength());
881           tags.add(sourceOpTag);
882           Cell updatedCell = new TagRewriteCell(cell, Tag.fromList(tags));
883           updatedCells.add(updatedCell);
884         }
885         m.getFamilyCellMap().clear();
886         // Clear and add new Cells to the Mutation.
887         for (Cell cell : updatedCells) {
888           Delete d = (Delete) m;
889           d.addDeleteMarker(cell);
890         }
891       }
892     }
893   }
894 
895   /**
896    * Set hbase.client.rpc.codec and hbase.client.default.rpc.codec both to empty string
897    * This means it will use no Codec. Make sure that we don't return Tags in response.
898    * @throws Exception Exception
899    */
900   @Test
901   public void testTagsWithEmptyCodec() throws Exception {
902     final TableName tableName = TableName.valueOf("testTagsWithEmptyCodec");
903     HTableDescriptor desc = new HTableDescriptor(tableName)
904       .addCoprocessor(MetadataController.class.getName());
905     desc.addFamily(new HColumnDescriptor(FAMILYA)
906       .setMaxVersions(5)
907       .setKeepDeletedCells(true));
908 
909     UTIL.getHBaseAdmin().createTable(desc);
910     Configuration conf = new Configuration(UTIL.getConfiguration());
911     conf.set(RPC_CODEC_CONF_KEY, "");
912     conf.set(DEFAULT_CODEC_CLASS, "");
913     try (Connection connection = ConnectionFactory.createConnection(conf);
914          Table table = connection.getTable(tableName)) {
915       //Add first version of QUAL
916       Put p = new Put(ROW1);
917       p.addColumn(FAMILYA, QUAL, now, QUAL);
918       table.put(p);
919 
920       //Add Delete family marker
921       Delete d = new Delete(ROW1, now+3);
922       // Add test attribute to delete mutation.
923       d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
924       table.delete(d);
925 
926       // Since RPC_CODEC_CONF_KEY and DEFAULT_CODEC_CLASS is set to empty, it will use
927       // empty Codec and it shouldn't encode/decode tags.
928       Scan scan = new Scan().withStartRow(ROW1).setRaw(true);
929       ResultScanner scanner = table.getScanner(scan);
930       int count = 0;
931       Result result;
932       while ((result = scanner.next()) != null) {
933         List<Cell> cells = result.listCells();
934         assertEquals(2, cells.size());
935         Cell cell = cells.get(0);
936         assertTrue(CellUtil.isDelete(cell));
937         List<Tag> tags = Tag.asList(cell.getTagsArray(), cell.getTagsOffset(),
938           cell.getTagsLength());
939         assertEquals(0, tags.size());
940         count++;
941       }
942       assertEquals(1, count);
943     } finally {
944       UTIL.deleteTable(tableName);
945     }
946   }
947 }