View Javadoc

1   /*
2    * Licensed to the Apache Software Foundation (ASF) under one
3    * or more contributor license agreements.  See the NOTICE file
4    * distributed with this work for additional information
5    * regarding copyright ownership.  The ASF licenses this file
6    * to you under the Apache License, Version 2.0 (the
7    * "License"); you may not use this file except in compliance
8    * with the License.  You may obtain a copy of the License at
9    *
10   *     http://www.apache.org/licenses/LICENSE-2.0
11   *
12   * Unless required by applicable law or agreed to in writing, software
13   * distributed under the License is distributed on an "AS IS" BASIS,
14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15   * See the License for the specific language governing permissions and
16   * limitations under the License.
17   */
18  package org.apache.hadoop.hbase.wal;
19  
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.SecureProtobufLogWriter;
import org.apache.hadoop.hbase.regionserver.wal.SecureWALCellCodec;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.log4j.Level;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
61  
62  /*
63   * Test that verifies WAL written by SecureProtobufLogWriter is not readable by ProtobufLogReader
64   */
65  @Category(MediumTests.class)
66  public class TestWALReaderOnSecureWAL {
67    static {
68      ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hbase.regionserver.wal"))
69        .getLogger().setLevel(Level.ALL);
70    };
71    static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
72    final byte[] value = Bytes.toBytes("Test value");
73  
74    private static final String WAL_ENCRYPTION = "hbase.regionserver.wal.encryption";
75  
76    @Rule
77    public TestName currentTest = new TestName();
78  
79    @BeforeClass
80    public static void setUpBeforeClass() throws Exception {
81      Configuration conf = TEST_UTIL.getConfiguration();
82      conf.set(HConstants.CRYPTO_KEYPROVIDER_CONF_KEY, KeyProviderForTesting.class.getName());
83      conf.set(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, "hbase");
84      conf.setBoolean("hbase.hlog.split.skip.errors", true);
85      conf.setBoolean(HConstants.ENABLE_WAL_ENCRYPTION, true);
86      FSUtils.setRootDir(conf, TEST_UTIL.getDataTestDir());
87    }
88  
89    private Path writeWAL(final WALFactory wals, final String tblName) throws IOException {
90      Configuration conf = TEST_UTIL.getConfiguration();
91      String clsName = conf.get(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
92      conf.setClass(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, SecureWALCellCodec.class,
93        WALCellCodec.class);
94      try {
95        TableName tableName = TableName.valueOf(tblName);
96        HTableDescriptor htd = new HTableDescriptor(tableName);
97        htd.addFamily(new HColumnDescriptor(tableName.getName()));
98        HRegionInfo regioninfo = new HRegionInfo(tableName,
99          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false);
100       final int total = 10;
101       final byte[] row = Bytes.toBytes("row");
102       final byte[] family = Bytes.toBytes("family");
103       final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl(1);
104 
105       // Write the WAL
106       WAL wal =
107           wals.getWAL(regioninfo.getEncodedNameAsBytes(), regioninfo.getTable().getNamespace());
108       for (int i = 0; i < total; i++) {
109         WALEdit kvs = new WALEdit();
110         kvs.add(new KeyValue(row, family, Bytes.toBytes(i), value));
111         wal.append(htd, regioninfo, new WALKey(regioninfo.getEncodedNameAsBytes(), tableName,
112             System.currentTimeMillis(), mvcc), kvs, true);
113       }
114       wal.sync();
115       final Path walPath = DefaultWALProvider.getCurrentFileName(wal);
116       wal.shutdown();
117       
118       return walPath;
119     } finally {
120       // restore the cell codec class
121       conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, clsName);
122     }
123   }
124   
125   @Test()
126   public void testWALReaderOnSecureWAL() throws Exception {
127     Configuration conf = TEST_UTIL.getConfiguration();
128     conf.setClass("hbase.regionserver.hlog.reader.impl", ProtobufLogReader.class,
129       WAL.Reader.class);
130     conf.setClass("hbase.regionserver.hlog.writer.impl", SecureProtobufLogWriter.class,
131       WALProvider.Writer.class);
132     conf.setBoolean(WAL_ENCRYPTION, true);
133     FileSystem fs = TEST_UTIL.getTestFileSystem();
134     final WALFactory wals = new WALFactory(conf, null, currentTest.getMethodName());
135     Path walPath = writeWAL(wals, currentTest.getMethodName());
136 
137     // Insure edits are not plaintext
138     long length = fs.getFileStatus(walPath).getLen();
139     FSDataInputStream in = fs.open(walPath);
140     byte[] fileData = new byte[(int)length];
141     IOUtils.readFully(in, fileData);
142     in.close();
143     assertFalse("Cells appear to be plaintext", Bytes.contains(fileData, value));
144 
145     // Confirm the WAL cannot be read back by ProtobufLogReader
146     try {
147       wals.createReader(TEST_UTIL.getTestFileSystem(), walPath);
148       assertFalse(true);
149     } catch (IOException ioe) {
150       // expected IOE
151     }
152     
153     FileStatus[] listStatus = fs.listStatus(walPath.getParent());
154     RecoveryMode mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? 
155         RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
156     Path rootdir = FSUtils.getRootDir(conf);
157     try {
158       WALSplitter s = new WALSplitter(wals, conf, rootdir, fs, rootdir, fs, null, null, mode);
159       s.splitLogFile(listStatus[0], null);
160       Path file = new Path(ZKSplitLog.getSplitLogDir(rootdir, listStatus[0].getPath().getName()),
161         "corrupt");
162       assertTrue(fs.exists(file));
163       // assertFalse("log splitting should have failed", true);
164     } catch (IOException ioe) {
165       assertTrue("WAL should have been sidelined", false);
166     }
167     wals.close();
168   }
169   
170   @Test()
171   public void testSecureWALReaderOnWAL() throws Exception {
172     Configuration conf = TEST_UTIL.getConfiguration();
173     conf.setClass("hbase.regionserver.hlog.reader.impl", SecureProtobufLogReader.class,
174       WAL.Reader.class);
175     conf.setClass("hbase.regionserver.hlog.writer.impl", ProtobufLogWriter.class,
176       WALProvider.Writer.class);
177     conf.setBoolean(WAL_ENCRYPTION, false);
178     FileSystem fs = TEST_UTIL.getTestFileSystem();
179     final WALFactory wals = new WALFactory(conf, null, currentTest.getMethodName());
180     Path walPath = writeWAL(wals, currentTest.getMethodName());
181 
182     // Ensure edits are plaintext
183     long length = fs.getFileStatus(walPath).getLen();
184     FSDataInputStream in = fs.open(walPath);
185     byte[] fileData = new byte[(int)length];
186     IOUtils.readFully(in, fileData);
187     in.close();
188     assertTrue("Cells should be plaintext", Bytes.contains(fileData, value));
189 
190     // Confirm the WAL can be read back by SecureProtobufLogReader
191     try {
192       WAL.Reader reader = wals.createReader(TEST_UTIL.getTestFileSystem(), walPath);
193       reader.close();
194     } catch (IOException ioe) {
195       assertFalse(true);
196     }
197     
198     FileStatus[] listStatus = fs.listStatus(walPath.getParent());
199     RecoveryMode mode = (conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY, false) ? 
200         RecoveryMode.LOG_REPLAY : RecoveryMode.LOG_SPLITTING);
201     Path rootdir = FSUtils.getRootDir(conf);
202     try {
203       WALSplitter s = new WALSplitter(wals, conf, rootdir, fs, rootdir, fs, null, null, mode);
204       s.splitLogFile(listStatus[0], null);
205       Path file = new Path(ZKSplitLog.getSplitLogDir(rootdir, listStatus[0].getPath().getName()),
206         "corrupt");
207       assertTrue(!fs.exists(file));
208     } catch (IOException ioe) {
209       assertTrue("WAL should have been processed", false);
210     }
211     wals.close();
212   }
213 }