View Javadoc

1   /*
2    * Licensed to the Apache Software Foundation (ASF) under one or more
3    * contributor license agreements. See the NOTICE file distributed with this
4    * work for additional information regarding copyright ownership. The ASF
5    * licenses this file to you under the Apache License, Version 2.0 (the
6    * "License"); you may not use this file except in compliance with the License.
7    * You may obtain a copy of the License at
8    *
9    * http://www.apache.org/licenses/LICENSE-2.0
10   *
11   * Unless required by applicable law or agreed to in writing, software
12   * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13   * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14   * License for the specific language governing permissions and limitations
15   * under the License.
16   */
17  package org.apache.hadoop.hbase.io.encoding;
18  
19  import static org.apache.hadoop.hbase.io.compress.Compression.Algorithm.NONE;
20  
21  import java.io.ByteArrayInputStream;
22  import java.io.DataOutputStream;
23  import java.io.IOException;
24  import java.io.InputStream;
25  import java.security.SecureRandom;
26  
27  import org.apache.hadoop.hbase.classification.InterfaceAudience;
28  import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
29  import org.apache.hadoop.hbase.io.TagCompressionContext;
30  import org.apache.hadoop.hbase.io.compress.Compression;
31  import org.apache.hadoop.hbase.io.crypto.Cipher;
32  import org.apache.hadoop.hbase.io.crypto.Encryption;
33  import org.apache.hadoop.hbase.io.crypto.Encryptor;
34  import org.apache.hadoop.hbase.io.hfile.BlockType;
35  import org.apache.hadoop.hbase.io.hfile.HFileContext;
36  import org.apache.hadoop.io.compress.CompressionOutputStream;
37  import org.apache.hadoop.io.compress.Compressor;
38  
39  import com.google.common.base.Preconditions;
40  import org.apache.hadoop.hbase.util.Bytes;
41  
42  /**
43   * A default implementation of {@link HFileBlockEncodingContext}. It
44   * compresses (and, when an encryption context is configured, encrypts)
45   * the data section as one continuous buffer.
46   *
47   * @see HFileBlockDefaultDecodingContext for the corresponding decompression/decryption side
48   *
49   */
49  @InterfaceAudience.Private
50  public class HFileBlockDefaultEncodingContext implements
51      HFileBlockEncodingContext {
      // Type of the block most recently encoded; set by postEncoding() and
      // reported through getBlockType().
52    private BlockType blockType;
      // Data block encoding in use; null or DataBlockEncoding.NONE means
      // "no encoding id is written" (see prepareEncoding()).
53    private final DataBlockEncoding encodingAlgo;
54  
      // Dummy header bytes copied through (uncompressed/plaintext) ahead of the
      // transformed payload; see the Preconditions message in the constructor
      // for the expected value.
55    private byte[] dummyHeader;
56  
57    // Compression state
58  
59    /** Compressor, which is also reused between consecutive blocks. */
60    private Compressor compressor;
61    /** Compression output stream */
62    private CompressionOutputStream compressionStream;
63    /** Underlying stream to write compressed bytes to */
64    private ByteArrayOutputStream compressedByteStream;
65  
      // HFile meta data: supplies the compression algorithm and the encryption
      // context used throughout this class.
66    private HFileContext fileContext;
67    private TagCompressionContext tagCompressionContext;
68  
69    // Encryption state
70  
71    /** Underlying stream to write encrypted bytes to */
72    private ByteArrayOutputStream cryptoByteStream;
73    /** Initialization vector */
74    private byte[] iv;
75  
      // Per-block encoder state; owned by callers via get/setEncodingState().
76    private EncodingState encoderState;
77  
78    /**
79     * @param encoding encoding used
80     * @param headerBytes dummy header bytes
81     * @param fileContext HFile meta data
82     */
83    public HFileBlockDefaultEncodingContext(DataBlockEncoding encoding, byte[] headerBytes,
84        HFileContext fileContext) {
85      this.encodingAlgo = encoding;
86      this.fileContext = fileContext;
        // A null compression algorithm is treated as NONE here.
        // NOTE(review): compressAfterEncoding() compares fileContext.getCompression()
        // against NONE directly, WITHOUT this null guard — confirm getCompression()
        // can never return null there, otherwise that path would NPE on the
        // uninitialized compression streams.
87      Compression.Algorithm compressionAlgorithm =
88          fileContext.getCompression() == null ? NONE : fileContext.getCompression();
89      if (compressionAlgorithm != NONE) {
          // Set up the reusable compression machinery once; the streams are
          // reset per block in compressAfterEncoding().
90        compressor = compressionAlgorithm.getCompressor();
91        compressedByteStream = new ByteArrayOutputStream();
92        try {
93          compressionStream =
94              compressionAlgorithm.createPlainCompressionStream(
95                  compressedByteStream, compressor);
96        } catch (IOException e) {
97          throw new RuntimeException(
98              "Could not create compression stream for algorithm "
99                  + compressionAlgorithm, e);
100       }
101     }
102 
103     Encryption.Context cryptoContext = fileContext.getEncryptionContext();
104     if (cryptoContext != Encryption.Context.NONE) {
105       cryptoByteStream = new ByteArrayOutputStream();
          // Random starting IV; incremented after each encrypted block
          // (see compressAfterEncoding()) so the same IV is not reused.
106       iv = new byte[cryptoContext.getCipher().getIvLength()];
107       new SecureRandom().nextBytes(iv);
108     }
109 
110     dummyHeader = Preconditions.checkNotNull(headerBytes,
111       "Please pass HConstants.HFILEBLOCK_DUMMY_HEADER instead of null for param headerBytes");
112   }
113 
114   /**
115    * prepare to start a new encoding.
116    * @throws IOException
117    */
118   public void prepareEncoding(DataOutputStream out) throws IOException {
        // Write the encoding id ahead of the encoded data, unless no (or NONE)
        // encoding is configured.
119     if (encodingAlgo != null && encodingAlgo != DataBlockEncoding.NONE) {
120       encodingAlgo.writeIdInBytes(out);
121     }
122   }
123 
124   @Override
125   public void postEncoding(BlockType blockType)
126       throws IOException {
        // Only records the block type; the actual compress/encrypt step is
        // driven separately through compressAndEncrypt().
127     this.blockType = blockType;
128   }
129 
130   @Override
131   public Bytes compressAndEncrypt(byte[] data, int offset, int length) throws IOException {
132     return compressAfterEncoding(data, offset, length, dummyHeader);
133   }
134 
      /**
       * Compresses and/or encrypts {@code uncompressedBytesWithHeaderLength} bytes
       * starting at {@code uncompressedBytesWithHeaderOffset}. The first
       * {@code headerBytes.length} bytes of that region are the (dummy) block
       * header, which is passed through uncompressed / in plaintext.
       *
       * @return the transformed bytes, or {@code null} when neither compression
       *         nor encryption is configured.
       *         NOTE(review): the returned Bytes wraps the internal buffer of a
       *         reused stream, so it is presumably only valid until the next
       *         block is processed — confirm callers copy it out before then.
       */
135   private Bytes compressAfterEncoding(byte[] uncompressedBytesWithHeaderBuffer,
136         int uncompressedBytesWithHeaderOffset, int uncompressedBytesWithHeaderLength, byte[] headerBytes)
137       throws IOException {
138     Encryption.Context cryptoContext = fileContext.getEncryptionContext();
139     if (cryptoContext != Encryption.Context.NONE) {
140 
141       // Encrypted block format:
142       // +--------------------------+
143       // | byte iv length           |
144       // +--------------------------+
145       // | iv data ...              |
146       // +--------------------------+
147       // | encrypted block data ... |
148       // +--------------------------+
149 
150       cryptoByteStream.reset();
151       // Write the block header (plaintext)
152       cryptoByteStream.write(headerBytes);
153 
154       InputStream in;
155       int plaintextLength;
156       // Run any compression before encryption
157       if (fileContext.getCompression() != Compression.Algorithm.NONE) {
158         compressedByteStream.reset();
159         compressionStream.resetState();
            // Skip past the header: only the payload after it is compressed.
160         compressionStream.write(uncompressedBytesWithHeaderBuffer,
161             headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength - headerBytes.length);
162         compressionStream.flush();
163         compressionStream.finish();
164         byte[] plaintext = compressedByteStream.toByteArray();
165         plaintextLength = plaintext.length;
166         in = new ByteArrayInputStream(plaintext);
167       } else {
168         plaintextLength = uncompressedBytesWithHeaderLength - headerBytes.length;
169         in = new ByteArrayInputStream(uncompressedBytesWithHeaderBuffer,
170           headerBytes.length + uncompressedBytesWithHeaderOffset, plaintextLength);
171       }
172 
173       if (plaintextLength > 0) {
174 
175         // Set up the cipher
176         Cipher cipher = cryptoContext.getCipher();
177         Encryptor encryptor = cipher.getEncryptor();
178         encryptor.setKey(cryptoContext.getKey());
179 
180         // Set up the IV
181         int ivLength = iv.length;
            // The IV length is serialized as a single byte (see the format
            // diagram above), hence the range check.
182         Preconditions.checkState(ivLength <= Byte.MAX_VALUE, "IV length out of range");
183         cryptoByteStream.write(ivLength);
184         if (ivLength > 0) {
185           encryptor.setIv(iv);
186           cryptoByteStream.write(iv);
187         }
188 
189         // Encrypt the data
190         Encryption.encrypt(cryptoByteStream, in, encryptor);
191 
192         // Increment the IV given the final block size
193         Encryption.incrementIv(iv, 1 + (cryptoByteStream.size() / encryptor.getBlockSize()));
194         return new Bytes(cryptoByteStream.getBuffer(), 0, cryptoByteStream.size());
195       } else {
196 
            // Empty payload: emit just the header plus a zero IV-length byte.
197         cryptoByteStream.write(0);
198         return new Bytes(cryptoByteStream.getBuffer(), 0, cryptoByteStream.size());
199       }
200 
201     } else {
202 
203       if (this.fileContext.getCompression() != NONE) {
204         compressedByteStream.reset();
            // The header is copied through uncompressed; only the payload after
            // it goes through the compression stream.
205         compressedByteStream.write(headerBytes);
206         compressionStream.resetState();
207         compressionStream.write(uncompressedBytesWithHeaderBuffer,
208           headerBytes.length + uncompressedBytesWithHeaderOffset, uncompressedBytesWithHeaderLength
209               - headerBytes.length);
210         compressionStream.flush();
211         compressionStream.finish();
212         return new Bytes(compressedByteStream.getBuffer(), 0, compressedByteStream.size());
213       } else {
            // No compression and no encryption: null signals "input unchanged".
214         return null;
215       }
216     }
217   }
218 
219   @Override
220   public BlockType getBlockType() {
221     return blockType;
222   }
223 
224   /**
225    * Releases the compressor this writer uses to compress blocks into the
226    * compressor pool.
227    */
228   @Override
229   public void close() {
        // Idempotent: compressor is nulled so a second close() is a no-op.
230     if (compressor != null) {
231       this.fileContext.getCompression().returnCompressor(compressor);
232       compressor = null;
233     }
234   }
235 
236   @Override
237   public DataBlockEncoding getDataBlockEncoding() {
238     return this.encodingAlgo;
239   }
240 
241   @Override
242   public HFileContext getHFileContext() {
243     return this.fileContext;
244   }
245 
246   public TagCompressionContext getTagCompressionContext() {
247     return tagCompressionContext;
248   }
249 
250   public void setTagCompressionContext(TagCompressionContext tagCompressionContext) {
251     this.tagCompressionContext = tagCompressionContext;
252   }
253 
254   @Override
255   public EncodingState getEncodingState() {
256     return this.encoderState;
257   }
258 
259   @Override
260   public void setEncodingState(EncodingState state) {
261     this.encoderState = state;
262   }
263 }