View Javadoc

1   /**
2    * Licensed to the Apache Software Foundation (ASF) under one
3    * or more contributor license agreements.  See the NOTICE file
4    * distributed with this work for additional information
5    * regarding copyright ownership.  The ASF licenses this file
6    * to you under the Apache License, Version 2.0 (the
7    * "License"); you may not use this file except in compliance
8    * with the License.  You may obtain a copy of the License at
9    *
10   *     http://www.apache.org/licenses/LICENSE-2.0
11   *
12   * Unless required by applicable law or agreed to in writing, software
13   * distributed under the License is distributed on an "AS IS" BASIS,
14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15   * See the License for the specific language governing permissions and
16   * limitations under the License.
17   */
18  package org.apache.hadoop.hbase.util.compaction;
19  
20  import java.io.IOException;
21  import java.util.Random;
22  import com.google.common.collect.Sets;
23  import org.apache.hadoop.hbase.HBaseTestingUtility;
24  import org.apache.hadoop.hbase.TableName;
25  import org.apache.hadoop.hbase.client.Connection;
26  import org.apache.hadoop.hbase.client.HBaseAdmin;
27  import org.apache.hadoop.hbase.client.Put;
28  import org.apache.hadoop.hbase.client.Table;
29  import org.apache.hadoop.hbase.testclassification.MediumTests;
30  import org.apache.hadoop.hbase.testclassification.MiscTests;
31  import org.apache.hadoop.hbase.util.Bytes;
32  import org.junit.After;
33  import static org.junit.Assert.assertEquals;
34  import static org.junit.Assert.assertTrue;
35  import org.junit.Before;
36  import org.junit.Test;
37  import org.junit.experimental.categories.Category;
38  
39  
40  @Category({ MiscTests.class, MediumTests.class })
41  public class MajorCompactorTest {
42  
43    /** Column family written to and compacted by every test in this class. */
44    public static final byte[] FAMILY = Bytes.toBytes("a");
45    protected HBaseTestingUtility utility;
46    protected HBaseAdmin admin;
47  
48    @Before public void setUp() throws Exception {
49      utility = new HBaseTestingUtility();
50      // Shorten the compacted-file discharger interval so store-file counts settle
51      // quickly after the major compaction finishes.
52      utility.getConfiguration().setInt("hbase.hfile.compaction.discharger.interval", 10);
53      utility.startMiniCluster();
54    }
55  
56    @After public void tearDown() throws Exception {
57      utility.shutdownMiniCluster();
58    }
59  
60    /**
61     * End-to-end check of {@link MajorCompactor}: create a multi-region table, flush
62     * several store files per region, run the compactor, and verify every region is
63     * left with exactly one store file.
64     */
65    @Test public void testCompactingATable() throws Exception {
66      TableName tableName = TableName.valueOf("MajorCompactorTest");
67      utility.createMultiRegionTable(tableName, FAMILY, 5);
68      utility.waitTableAvailable(tableName);
69      Connection connection = utility.getConnection();
70      // try-with-resources: the table is released even if a put or flush throws,
71      // instead of leaking until the mini cluster shuts down.
72      try (Table table = connection.getTable(tableName)) {
73        // write data and flush multiple store files:
74        for (int i = 0; i < 5; i++) {
75          loadRandomRows(table, FAMILY, 50, 100);
76          utility.flush(tableName);
77        }
78      }
79      int numberOfRegions = utility.getHBaseAdmin().getTableRegions(tableName).size();
80      int numHFiles = utility.getNumHFiles(tableName, FAMILY);
81      // we should have a table with more store files than we would before we major compacted.
82      assertTrue(numberOfRegions < numHFiles);
83  
84      MajorCompactor compactor =
85          new MajorCompactor(utility.getConfiguration(), tableName,
86              Sets.newHashSet(Bytes.toString(FAMILY)), 1, System.currentTimeMillis(), 200);
87      compactor.initializeWorkQueues();
88      compactor.compactAllRegions();
89      compactor.shutdown();
90  
91      // verify that the store has been completely major compacted: one store file per region.
92      numberOfRegions = utility.getHBaseAdmin().getTableRegions(tableName).size();
93      numHFiles = utility.getNumHFiles(tableName, FAMILY);
94      assertEquals(numHFiles, numberOfRegions);
95    }
96  
97    /**
98     * Writes {@code totalRows} puts with random {@code rowSize}-byte row keys into family
99     * {@code f}. Random keys may (very rarely) collide, so slightly fewer than
100    * {@code totalRows} distinct rows can result; the test only needs "some data" per region.
101    *
102    * @param t         table to write to; not closed by this method
103    * @param f         column family receiving a single one-byte qualifier/value cell per row
104    * @param rowSize   length in bytes of each random row key
105    * @param totalRows number of puts to issue
106    * @throws IOException if any put fails
107    */
108   protected void loadRandomRows(final Table t, final byte[] f, int rowSize, int totalRows)
109       throws IOException {
110     byte[] row = new byte[rowSize];
111     for (int i = 0; i < totalRows; i++) {
112       // ThreadLocalRandom avoids allocating a fresh Random on every call.
113       ThreadLocalRandom.current().nextBytes(row);
114       Put put = new Put(row);
115       put.addColumn(f, new byte[]{0}, new byte[]{0});
116       t.put(put);
117     }
118   }
119 }