View Javadoc

1   /**
2    *
3    * Licensed to the Apache Software Foundation (ASF) under one
4    * or more contributor license agreements.  See the NOTICE file
5    * distributed with this work for additional information
6    * regarding copyright ownership.  The ASF licenses this file
7    * to you under the Apache License, Version 2.0 (the
8    * "License"); you may not use this file except in compliance
9    * with the License.  You may obtain a copy of the License at
10   *
11   *     http://www.apache.org/licenses/LICENSE-2.0
12   *
13   * Unless required by applicable law or agreed to in writing, software
14   * distributed under the License is distributed on an "AS IS" BASIS,
15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16   * See the License for the specific language governing permissions and
17   * limitations under the License.
18   */
19  package org.apache.hadoop.hbase.io.hfile;
20  
21  import static org.junit.Assert.assertEquals;
22  import static org.junit.Assert.assertTrue;
23  import static org.junit.Assert.fail;
24  
25  import java.io.IOException;
26  import java.lang.management.ManagementFactory;
27  import java.lang.management.MemoryUsage;
28  import java.nio.ByteBuffer;
29  import java.util.Map;
30  
31  import org.apache.commons.logging.Log;
32  import org.apache.commons.logging.LogFactory;
33  import org.apache.hadoop.conf.Configuration;
34  import org.apache.hadoop.fs.FileSystem;
35  import org.apache.hadoop.fs.Path;
36  import org.apache.hadoop.hbase.HBaseConfiguration;
37  import org.apache.hadoop.hbase.HBaseTestingUtility;
38  import org.apache.hadoop.hbase.HConstants;
39  import org.apache.hadoop.hbase.testclassification.LargeTests;
40  import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
41  import org.apache.hadoop.hbase.io.util.HeapMemorySizeUtil;
42  import org.apache.hadoop.hbase.util.Threads;
43  import org.junit.After;
44  import org.junit.Before;
45  import org.junit.Test;
46  import org.junit.experimental.categories.Category;
47  
48  /**
49   * Tests that {@link CacheConfig} does as expected.
50   */
51  // This test is marked as a large test though it runs in a short amount of time
52  // (seconds).  It is large because it depends on being able to reset the global
53  // blockcache instance which is in a global variable.  Experience has it that
54  // tests clash on the global variable if this test is run as small sized test.
@Category(LargeTests.class)
public class TestCacheConfig {
  private static final Log LOG = LogFactory.getLog(TestCacheConfig.class);
  // Rebuilt in setUp() so each test starts from a pristine HBase configuration.
  private Configuration conf;

  /**
   * Test deserializer that registers itself with {@link CacheableDeserializerIdManager}
   * on construction and, regardless of the buffer contents, always hands back the single
   * {@link Cacheable} supplied to the constructor.
   */
  static class Deserializer implements CacheableDeserializer<Cacheable> {
    private final Cacheable cacheable;
    private int deserializedIdentifier = 0;

    Deserializer(final Cacheable c) {
      deserializedIdentifier = CacheableDeserializerIdManager.registerDeserializer(this);
      this.cacheable = c;
    }

    @Override
    public int getDeserialiserIdentifier() {
      return deserializedIdentifier;
    }

    @Override
    public Cacheable deserialize(ByteBuffer b, boolean reuse) throws IOException {
      // Buffer contents are ignored; log for test traceability and return the canned value.
      LOG.info("Deserialized " + b + ", reuse=" + reuse);
      return cacheable;
    }

    @Override
    public Cacheable deserialize(ByteBuffer b) throws IOException {
      LOG.info("Deserialized " + b);
      return cacheable;
    }
  };

  /**
   * Cache entry that reports itself as a ROOT_INDEX block; used to exercise the
   * index-block path of the caches.
   */
  static class IndexCacheEntry extends DataCacheEntry {
    // NOTE(review): SINGLETON is passed to super(...) from the constructor below, but
    // during static initialization of this class the field has not been assigned yet,
    // so the Deserializer built for the singleton instance wraps a null Cacheable.
    // The tests never deserialize these entries, so this is benign here — but do not
    // reuse this fixture anywhere that actually invokes the deserializer.
    private static IndexCacheEntry SINGLETON = new IndexCacheEntry();

    public IndexCacheEntry() {
      super(SINGLETON);
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.ROOT_INDEX;
    }
  }

  /**
   * Minimal {@link Cacheable} of (claimed) size 1 byte that reports itself as a DATA
   * block. Serialization is a no-op beyond logging; deserialization returns a canned
   * instance via {@link Deserializer}.
   */
  static class DataCacheEntry implements Cacheable {
    private static final int SIZE = 1;
    // NOTE(review): same static-init ordering caveat as IndexCacheEntry.SINGLETON —
    // the no-arg constructor runs while SINGLETON is still null.
    private static DataCacheEntry SINGLETON = new DataCacheEntry();
    final CacheableDeserializer<Cacheable> deserializer;

    DataCacheEntry() {
      this(SINGLETON);
    }

    DataCacheEntry(final Cacheable c) {
      this.deserializer = new Deserializer(c);
    }

    @Override
    public String toString() {
      return "size=" + SIZE + ", type=" + getBlockType();
    };

    @Override
    public long heapSize() {
      return SIZE;
    }

    @Override
    public int getSerializedLength() {
      return SIZE;
    }

    @Override
    public void serialize(ByteBuffer destination, boolean includeNextBlockOnDiskSize) {
      // No real payload; the tests only count blocks and sizes.
      LOG.info("Serialized " + this + " to " + destination);
    }

    @Override
    public CacheableDeserializer<Cacheable> getDeserializer() {
      return this.deserializer;
    }

    @Override
    public BlockType getBlockType() {
      return BlockType.DATA;
    }
  };

  /**
   * Cache entry reporting itself as an INTERMEDIATE_INDEX ("meta") block.
   */
  static class MetaCacheEntry extends DataCacheEntry {
    @Override
    public BlockType getBlockType() {
      return BlockType.INTERMEDIATE_INDEX;
    }
  }

  @Before
  public void setUp() throws Exception {
    // Reset the global block cache singletons so tests don't see each other's caches.
    CacheConfig.clearGlobalInstances();
    this.conf = HBaseConfiguration.create();
  }

  @After
  public void tearDown() throws Exception {
    // Let go of current block cache.
    CacheConfig.clearGlobalInstances();
  }

  /**
   * Exercises basic cache/evict round-trips against the block cache deployed by
   * {@code cc}, asserting block counts and (optionally) size accounting.
   * @param cc cache configuration whose deployed BlockCache is exercised
   * @param doubling If true, addition of element ups counter by 2, not 1, because element added
   * to onheap and offheap caches.
   * @param sizing True if we should run sizing test (doesn't always apply).
   */
  void basicBlockCacheOps(final CacheConfig cc, final boolean doubling,
      final boolean sizing) {
    assertTrue(cc.isBlockCacheEnabled());
    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());
    BlockCache bc = cc.getBlockCache();
    BlockCacheKey bck = new BlockCacheKey("f", 0);
    Cacheable c = new DataCacheEntry();
    // Do asserts on block counting.
    long initialBlockCount = bc.getBlockCount();
    bc.cacheBlock(bck, c, cc.isInMemory(), cc.isCacheDataInL1());
    assertEquals(doubling? 2: 1, bc.getBlockCount() - initialBlockCount);
    bc.evictBlock(bck);
    assertEquals(initialBlockCount, bc.getBlockCount());
    // Do size accounting.  Do it after the above 'warm-up' because it looks like some
    // buffers do lazy allocation so sizes are off on first go around.
    if (sizing) {
      long originalSize = bc.getCurrentSize();
      bc.cacheBlock(bck, c, cc.isInMemory(), cc.isCacheDataInL1());
      assertTrue(bc.getCurrentSize() > originalSize);
      bc.evictBlock(bck);
      long size = bc.getCurrentSize();
      assertEquals(originalSize, size);
    }
  }

  /**
   * Caches a single DATA block under {@code filename} into the block cache deployed
   * by {@code cc}, honoring the config's in-memory and cache-data-in-L1 flags.
   * @param cc cache configuration providing the BlockCache to populate
   * @param filename hfile name used to build the {@link BlockCacheKey}
   * @return the block count of the cache after the insert
   */
  private long cacheDataBlock(final CacheConfig cc, final String filename) {
    BlockCacheKey bck = new BlockCacheKey(filename, 0);
    Cacheable c = new DataCacheEntry();
    // Do asserts on block counting.
    cc.getBlockCache().cacheBlock(bck, c, cc.isInMemory(), cc.isCacheDataInL1());
    return cc.getBlockCache().getBlockCount();
  }

  /**
   * With no bucket-cache configuration, the default deploy is a plain LruBlockCache.
   */
  @Test
  public void testCacheConfigDefaultLRUBlockCache() {
    CacheConfig cc = new CacheConfig(this.conf);
    assertTrue(cc.isBlockCacheEnabled());
    assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());
    basicBlockCacheOps(cc, false, true);
    assertTrue(cc.getBlockCache() instanceof LruBlockCache);
  }

  /**
   * Assert that the caches are deployed with CombinedBlockCache and of the appropriate sizes.
   */
  @Test
  public void testOffHeapBucketCacheConfig() {
    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
    doBucketCacheConfigTest();
  }

  /** Same as the offheap variant, but with the bucket cache backed by the Java heap. */
  @Test
  public void testOnHeapBucketCacheConfig() {
    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "heap");
    doBucketCacheConfigTest();
  }

  /** Bucket cache backed by a file; the backing file is created and cleaned up here. */
  @Test
  public void testFileBucketCacheConfig() throws IOException {
    HBaseTestingUtility htu = new HBaseTestingUtility(this.conf);
    try {
      Path p = new Path(htu.getDataTestDir(), "bc.txt");
      FileSystem fs = FileSystem.get(this.conf);
      fs.create(p).close();
      this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "file:" + p);
      doBucketCacheConfigTest();
    } finally {
      htu.cleanupTestDir();
    }
  }

  /**
   * Shared body for the bucket-cache deploy tests: expects a CombinedBlockCache whose
   * L1 is an LruBlockCache sized per HeapMemorySizeUtil and whose L2 is a BucketCache
   * of {@code bcSize} MB. Callers must have set BUCKET_CACHE_IOENGINE_KEY beforehand.
   */
  private void doBucketCacheConfigTest() {
    final int bcSize = 100;
    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);
    CacheConfig cc = new CacheConfig(this.conf);
    basicBlockCacheOps(cc, false, false);
    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
    // TODO: Assert sizes allocated are right and proportions.
    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();
    BlockCache [] bcs = cbc.getBlockCaches();
    assertTrue(bcs[0] instanceof LruBlockCache);
    LruBlockCache lbc = (LruBlockCache)bcs[0];
    assertEquals(HeapMemorySizeUtil.getLruCacheSize(this.conf), lbc.getMaxSize());
    assertTrue(bcs[1] instanceof BucketCache);
    BucketCache bc = (BucketCache)bcs[1];
    // getMaxSize comes back in bytes but we specified size in MB
    assertEquals(bcSize, bc.getMaxSize() / (1024 * 1024));
  }

  /**
   * Assert that when BUCKET_CACHE_COMBINED_KEY is false, the non-default, that we deploy
   * LruBlockCache as L1 with a BucketCache for L2.
   */
  @Test (timeout=10000)
  public void testBucketCacheConfigL1L2Setup() {
    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
    // Make lru size is smaller than bcSize for sure.  Need this to be true so when eviction
    // from L1 happens, it does not fail because L2 can't take the eviction because block too big.
    this.conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.001f);
    long lruExpectedSize = HeapMemorySizeUtil.getLruCacheSize(this.conf);
    final int bcSize = 100;
    long bcExpectedSize = 100 * 1024 * 1024; // MB.
    assertTrue(lruExpectedSize < bcExpectedSize);
    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);
    this.conf.setBoolean(CacheConfig.BUCKET_CACHE_COMBINED_KEY, false);
    CacheConfig cc = new CacheConfig(this.conf);
    basicBlockCacheOps(cc, false, false);
    assertTrue(cc.getBlockCache() instanceof LruBlockCache);
    // TODO: Assert sizes allocated are right and proportions.
    LruBlockCache lbc = (LruBlockCache)cc.getBlockCache();
    assertEquals(lruExpectedSize, lbc.getMaxSize());
    BlockCache bc = lbc.getVictimHandler();
    // getMaxSize comes back in bytes but we specified size in MB
    assertEquals(bcExpectedSize, ((BucketCache) bc).getMaxSize());
    // Test the L1+L2 deploy works as we'd expect with blocks evicted from L1 going to L2.
    long initialL1BlockCount = lbc.getBlockCount();
    long initialL2BlockCount = bc.getBlockCount();
    Cacheable c = new DataCacheEntry();
    BlockCacheKey bck = new BlockCacheKey("bck", 0);
    lbc.cacheBlock(bck, c, false, false);
    assertEquals(initialL1BlockCount + 1, lbc.getBlockCount());
    assertEquals(initialL2BlockCount, bc.getBlockCount());
    // Force evictions by putting in a block too big.
    final long justTooBigSize = lbc.acceptableSize() + 1;
    lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() {
      @Override
      public long heapSize() {
        return justTooBigSize;
      }

      @Override
      public int getSerializedLength() {
        return (int)heapSize();
      }
    });
    // The eviction thread in lrublockcache needs to run.
    while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10);
    assertEquals(initialL1BlockCount, lbc.getBlockCount());
    long count = bc.getBlockCount();
    assertTrue(initialL2BlockCount + 1 <= count);
  }

  /**
   * Test the cacheDataInL1 flag.  When set, data blocks should be cached in the l1 tier, up in
   * LruBlockCache when using CombinedBlockCcahe.
   */
  @Test
  public void testCacheDataInL1() {
    this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
    this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);
    CacheConfig cc = new CacheConfig(this.conf);
    assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
    CombinedBlockCache cbc = (CombinedBlockCache)cc.getBlockCache();
    // Add a data block.  Should go into L2, into the Bucket Cache, not the LruBlockCache.
    cacheDataBlock(cc, "1");
    LruBlockCache lrubc = (LruBlockCache)cbc.getBlockCaches()[0];
    assertDataBlockCount(lrubc, 0);
    // Enable our test flag.
    cc.setCacheDataInL1(true);
    cacheDataBlock(cc, "2");
    assertDataBlockCount(lrubc, 1);
    cc.setCacheDataInL1(false);
    cacheDataBlock(cc, "3");
    // L1 data-block count stays at 1: block "3" goes to L2 once the flag is off again.
    assertDataBlockCount(lrubc, 1);
  }

  /**
   * A bucket-size list containing 4000 (not a multiple of 256) must be rejected when
   * constructing the L2 cache.
   */
  @Test
  public void testL2CacheWithInvalidBucketSize() {
    Configuration c = new Configuration(this.conf);
    c.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
    c.set(CacheConfig.BUCKET_CACHE_BUCKETS_KEY, "256,512,1024,2048,4000,4096");
    c.setFloat(HConstants.BUCKET_CACHE_SIZE_KEY, 1024);
    try {
      CacheConfig.getL2(c);
      fail("Should throw IllegalArgumentException when passing illegal value for bucket size");
    } catch (IllegalArgumentException e) {
      // Expected: invalid bucket size rejected.
    }
  }

  /**
   * Asserts that {@code bc} currently holds exactly {@code expected} DATA blocks,
   * treating a missing type-count map or absent DATA entry as zero.
   */
  private void assertDataBlockCount(final LruBlockCache bc, final int expected) {
    Map<BlockType, Integer> blocks = bc.getBlockTypeCountsForTest();
    assertEquals(expected, blocks == null? 0:
      blocks.get(BlockType.DATA) == null? 0:
      blocks.get(BlockType.DATA).intValue());
  }
}