1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.io.hfile;
20
21 import com.google.common.util.concurrent.ThreadFactoryBuilder;
22 import java.lang.ref.WeakReference;
23 import java.util.EnumMap;
24 import java.util.Iterator;
25 import java.util.List;
26 import java.util.Map;
27 import java.util.PriorityQueue;
28 import java.util.SortedSet;
29 import java.util.TreeSet;
30 import java.util.concurrent.ConcurrentHashMap;
31 import java.util.concurrent.Executors;
32 import java.util.concurrent.ScheduledExecutorService;
33 import java.util.concurrent.TimeUnit;
34 import java.util.concurrent.atomic.AtomicLong;
35 import java.util.concurrent.locks.ReentrantLock;
36 import com.google.common.base.Objects;
37 import org.apache.commons.logging.Log;
38 import org.apache.commons.logging.LogFactory;
39 import org.apache.hadoop.hbase.classification.InterfaceAudience;
40 import org.apache.hadoop.conf.Configuration;
41 import org.apache.hadoop.hbase.io.HeapSize;
42 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
43 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
44 import org.apache.hadoop.hbase.util.Bytes;
45 import org.apache.hadoop.hbase.util.ClassSize;
46 import org.apache.hadoop.hbase.util.HasThread;
47 import org.apache.hadoop.util.StringUtils;
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
/**
 * A block cache implementation that is memory-aware using {@link HeapSize}, bounded by a
 * configurable maximum size, and concurrent: backed by a {@link ConcurrentHashMap} with
 * eviction performed asynchronously by a dedicated {@link EvictionThread}.
 *
 * <p>Eviction is LRU-like but split over three priority buckets — "single" (accessed once),
 * "multi" (accessed more than once) and "memory" (in-memory column families) — each with its
 * own share of the cache (see {@code singleFactor}/{@code multiFactor}/{@code memoryFactor}).
 * An optional victim cache ({@link #victimHandler}) receives evicted blocks and is consulted
 * on misses.
 */
@InterfaceAudience.Private
public class LruBlockCache implements ResizableBlockCache, HeapSize {

private static final Log LOG = LogFactory.getLog(LruBlockCache.class);

/**
 * Percentage of total size that eviction will evict until; e.g. if set to .8, then we will keep
 * evicting during an eviction run until the cache size is down to 80% of the total.
 */
static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";

/**
 * Acceptable size of cache (no evictions if size < acceptable).
 */
static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";

/**
 * Hard capacity limit of cache, will reject any put if size > this * acceptable.
 */
static final String LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.hard.capacity.limit.factor";
static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";

/**
 * Configuration key to force data-block always (except in-memory are too much)
 * prioritized in in-memory bucket.
 */
static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";

/* Default Configuration Parameters */

/* Backing Concurrent Map Configuration */
static final float DEFAULT_LOAD_FACTOR = 0.75f;
static final int DEFAULT_CONCURRENCY_LEVEL = 16;

/* Eviction thresholds */
static final float DEFAULT_MIN_FACTOR = 0.95f;
static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;

/* Priority buckets */
static final float DEFAULT_SINGLE_FACTOR = 0.25f;
static final float DEFAULT_MULTI_FACTOR = 0.50f;
static final float DEFAULT_MEMORY_FACTOR = 0.25f;

/* Hard capacity limit on top of acceptable size */
static final float DEFAULT_HARD_CAPACITY_LIMIT_FACTOR = 1.2f;

static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;

/** Statistics thread period, in seconds */
static final int statThreadPeriod = 60 * 5;
private static final String LRU_MAX_BLOCK_SIZE = "hbase.lru.max.block.size";
private static final long DEFAULT_MAX_BLOCK_SIZE = 16L * 1024L * 1024L;

/** Concurrent map (the cache) */
private transient final Map<BlockCacheKey,LruCachedBlock> map;

/** Eviction lock (locked when eviction in process) */
private transient final ReentrantLock evictionLock = new ReentrantLock(true);
private final long maxBlockSize;

/** Volatile boolean to track if we are in an eviction process or not */
private volatile boolean evictionInProgress = false;

/** Eviction thread; null when eviction runs inline on the caller thread */
private transient final EvictionThread evictionThread;

/** Statistics thread schedule pool (for heavy debugging, could remove) */
private transient final ScheduledExecutorService scheduleThreadPool =
Executors.newScheduledThreadPool(1,
new ThreadFactoryBuilder().setNameFormat("LruBlockCacheStatsExecutor")
.setDaemon(true).build());

/** Current size of cache in bytes (includes fixed overhead) */
private final AtomicLong size;

/** Current size of data blocks in bytes */
private final AtomicLong dataBlockSize;

/** Current number of cached elements */
private final AtomicLong elements;

/** Current number of cached data block elements */
private final AtomicLong dataBlockElements;

/** Cache access count (sequential ID used as LRU ordinal) */
private final AtomicLong count;

/** hard capacity limit, as a multiple of the acceptable size */
private float hardCapacityLimitFactor;

/** Cache statistics */
private final CacheStats stats;

/** Maximum allowable size of cache (block put if size > max, evict) */
private long maxSize;

/** Approximate block size */
private long blockSize;

/** Acceptable size of cache (no evictions if size < acceptable) */
private float acceptableFactor;

/** Minimum threshold of cache (when evicting, evict until size < min) */
private float minFactor;

/** Single access bucket size */
private float singleFactor;

/** Multiple access bucket size */
private float multiFactor;

/** In-memory bucket size */
private float memoryFactor;

/** Overhead of the structure itself, computed once at construction */
private long overhead;

/** Whether in-memory hfile's data block has higher priority when evicting */
private boolean forceInMemory;

/** Where to send victims (blocks evicted/missing from the cache); may stay null */
private transient BlockCache victimHandler = null;
227
/**
 * Default constructor. Specify maximum size and expected average block size (approximation is
 * fine). All other factors are taken from the defaults declared in this class.
 *
 * @param maxSize   maximum size of cache, in bytes
 * @param blockSize approximate size of each block, in bytes
 */
public LruBlockCache(long maxSize, long blockSize) {
this(maxSize, blockSize, true);
}

/**
 * Constructor used for testing. Allows disabling of the eviction thread (evictions then run
 * synchronously on the caller thread).
 */
public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) {
this(maxSize, blockSize, evictionThread,
(int)Math.ceil(1.2*maxSize/blockSize),  // initial map size: 20% headroom over expected count
DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
DEFAULT_SINGLE_FACTOR,
DEFAULT_MULTI_FACTOR,
DEFAULT_MEMORY_FACTOR,
DEFAULT_HARD_CAPACITY_LIMIT_FACTOR,
false,
DEFAULT_MAX_BLOCK_SIZE
);
}
248
/**
 * Constructor that reads the tunable factors from the given {@link Configuration}, falling
 * back to the class defaults for any key that is not set.
 */
public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
this(maxSize, blockSize, evictionThread,
(int)Math.ceil(1.2*maxSize/blockSize),  // initial map size: 20% headroom over expected count
DEFAULT_LOAD_FACTOR,
DEFAULT_CONCURRENCY_LEVEL,
conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
conf.getFloat(LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, DEFAULT_HARD_CAPACITY_LIMIT_FACTOR),
conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE),
conf.getLong(LRU_MAX_BLOCK_SIZE, DEFAULT_MAX_BLOCK_SIZE)
);
}

/** Convenience constructor: eviction thread enabled, factors from {@code conf}. */
public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
this(maxSize, blockSize, true, conf);
}
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
/**
 * Configurable constructor. Use this constructor if not using defaults.
 *
 * @param maxSize             maximum size of this cache, in bytes
 * @param blockSize           expected average size of blocks, in bytes
 * @param evictionThread      whether to run evictions in a background thread
 * @param mapInitialSize      initial size of backing ConcurrentHashMap
 * @param mapLoadFactor       initial load factor of backing ConcurrentHashMap
 * @param mapConcurrencyLevel initial concurrency factor for backing CHM
 * @param minFactor           percentage of total size that eviction will evict until
 * @param acceptableFactor    percentage of total size that triggers eviction
 * @param singleFactor        percentage of total size for single-access blocks
 * @param multiFactor         percentage of total size for multiple-access blocks
 * @param memoryFactor        percentage of total size for in-memory blocks
 * @param hardLimitFactor     hard capacity limit as a multiple of acceptable size
 * @param forceInMemory       whether in-memory blocks take priority over all others on evict
 * @param maxBlockSize        largest block (heap size) this cache will accept
 * @throws IllegalArgumentException if the bucket factors do not sum to 1 or are negative,
 *         or if {@code minFactor >= acceptableFactor}, or either is {@code >= 1}
 */
public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
float minFactor, float acceptableFactor, float singleFactor,
float multiFactor, float memoryFactor, float hardLimitFactor,
boolean forceInMemory, long maxBlockSize) {
this.maxBlockSize = maxBlockSize;
// NOTE(review): exact float equality — works for the shipped defaults (0.25+0.50+0.25)
// but user-configured factors that only approximately sum to 1 will be rejected.
if(singleFactor + multiFactor + memoryFactor != 1 ||
singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
throw new IllegalArgumentException("Single, multi, and memory factors " +
" should be non-negative and total 1.0");
}
if(minFactor >= acceptableFactor) {
throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
}
if(minFactor >= 1.0f || acceptableFactor >= 1.0f) {
throw new IllegalArgumentException("all factors must be < 1");
}
this.maxSize = maxSize;
this.blockSize = blockSize;
this.forceInMemory = forceInMemory;
map = new ConcurrentHashMap<BlockCacheKey,LruCachedBlock>(mapInitialSize,
mapLoadFactor, mapConcurrencyLevel);
this.minFactor = minFactor;
this.acceptableFactor = acceptableFactor;
this.singleFactor = singleFactor;
this.multiFactor = multiFactor;
this.memoryFactor = memoryFactor;
this.stats = new CacheStats(this.getClass().getSimpleName());
this.count = new AtomicLong(0);
this.elements = new AtomicLong(0);
this.dataBlockElements = new AtomicLong(0);
this.dataBlockSize = new AtomicLong(0);
this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
// Reported cache size starts at the fixed structural overhead, not zero.
this.size = new AtomicLong(this.overhead);
this.hardCapacityLimitFactor = hardLimitFactor;
if(evictionThread) {
this.evictionThread = new EvictionThread(this);
this.evictionThread.start();
} else {
this.evictionThread = null;
}
// TODO: Add means of turning this off.  Bit obnoxious running thread just to make a log
// message every five minutes.
this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
}
329
330 @Override
331 public void setMaxSize(long maxSize) {
332 this.maxSize = maxSize;
333 if(this.size.get() > acceptableSize() && !evictionInProgress) {
334 runEviction();
335 }
336 }
337
338
339
340
341
342
343
344
345
346
347
348
349
/**
 * Cache the block with the specified name and buffer.
 *
 * @param cacheKey       block's cache key
 * @param buf            block buffer
 * @param inMemory       if block is in-memory
 * @param cacheDataInL1  if true, skip the replace-existing check and cache in this (L1) cache
 */
@Override
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory,
final boolean cacheDataInL1) {

// Reject blocks bigger than maxBlockSize: one huge block would skew the LRU balance.
if (buf.heapSize() > maxBlockSize) {
// If there are a lot of blocks that are too big this can make the logs way too noisy,
// so only log 1 in every 50 rejected inserts.
if (stats.failInsert() % 50 == 0) {
LOG.warn("Trying to cache too large a block "
+ cacheKey.getHfileName() + " @ "
+ cacheKey.getOffset()
+ " is " + buf.heapSize()
+ " which is larger than " + maxBlockSize);
}
return;
}

LruCachedBlock cb = map.get(cacheKey);
// If already cached, let the utility decide whether this buffer should replace it.
if (!cacheDataInL1 && cb != null
&& !BlockCacheUtil.shouldReplaceExistingCacheBlock(this, cacheKey, buf)) {
return;
}
long currentSize = size.get();
long currentAcceptableSize = acceptableSize();
long hardLimitSize = (long) (hardCapacityLimitFactor * currentAcceptableSize);
// At or above the hard limit: refuse the insert and (if needed) trigger eviction.
if (currentSize >= hardLimitSize) {
stats.failInsert();
if (LOG.isTraceEnabled()) {
LOG.trace("LruBlockCache current size " + StringUtils.byteDesc(currentSize)
+ " has exceeded acceptable size " + StringUtils.byteDesc(currentAcceptableSize) + " too many."
+ " the hard limit size is " + StringUtils.byteDesc(hardLimitSize) + ", failed to put cacheKey:"
+ cacheKey + " into LruBlockCache.");
}
if (!evictionInProgress) {
runEviction();
}
return;
}
// Accepted: wrap with a fresh access ordinal and account for the added size.
cb = new LruCachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
long newSize = updateSizeMetrics(cb, false);
map.put(cacheKey, cb);
long val = elements.incrementAndGet();
if (buf.getBlockType().isData()) {
dataBlockElements.incrementAndGet();
}
if (LOG.isTraceEnabled()) {
long size = map.size();
assertCounterSanity(size, val);
}
// Above the acceptable watermark after insert: schedule an eviction pass.
if (newSize > currentAcceptableSize && !evictionInProgress) {
runEviction();
}
}
404
405
406
407
408
409 private static void assertCounterSanity(long mapSize, long counterVal) {
410 if (counterVal < 0) {
411 LOG.trace("counterVal overflow. Assertions unreliable. counterVal=" + counterVal +
412 ", mapSize=" + mapSize);
413 return;
414 }
415 if (mapSize < Integer.MAX_VALUE) {
416 double pct_diff = Math.abs((((double) counterVal) / ((double) mapSize)) - 1.);
417 if (pct_diff > 0.05) {
418 LOG.trace("delta between reported and actual size > 5%. counterVal=" + counterVal +
419 ", mapSize=" + mapSize);
420 }
421 }
422 }
423
424
425
426
427
428
429
/**
 * Cache the block with the specified name and buffer, with default priority
 * (not in-memory, no forced L1 caching).
 *
 * @param cacheKey block's cache key
 * @param buf      block buffer
 */
@Override
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
cacheBlock(cacheKey, buf, false, false);
}
434
435
436
437
438
439
440
441
442
443 protected long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
444 long heapsize = cb.heapSize();
445 BlockType bt = cb.getBuffer().getBlockType();
446 if (evict) {
447 heapsize *= -1;
448 }
449 if (bt != null && bt.isData()) {
450 dataBlockSize.addAndGet(heapsize);
451 }
452 return size.addAndGet(heapsize);
453 }
454
455
456
457
458
459
460
461
462
463
/**
 * Get the buffer of the block with the specified name.
 *
 * @param cacheKey           block's cache key
 * @param caching            true if the caller caches blocks on cache misses
 * @param repeat             whether this is a repeat lookup for the same block (no miss stats)
 * @param updateCacheMetrics whether to update hit/miss statistics
 * @return buffer of specified cache key, or null if not in cache (and no victim hit)
 */
@Override
public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat,
boolean updateCacheMetrics) {
LruCachedBlock cb = map.get(cacheKey);
if (cb == null) {
if (!repeat && updateCacheMetrics) {
stats.miss(caching, cacheKey.isPrimary(), cacheKey.getBlockType());
}
// On miss, fall through to the victim cache (if configured); a hit there is
// promoted back into this cache when the caller is caching.
if (victimHandler != null && !repeat) {
Cacheable result = victimHandler.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
if (result != null && caching) {
cacheBlock(cacheKey, result, false, true);
}
return result;
}
return null;
}
if (updateCacheMetrics) stats.hit(caching, cacheKey.isPrimary(), cacheKey.getBlockType());
// Touch the block with a fresh access ordinal so the LRU ordering is updated.
cb.access(count.incrementAndGet());
return cb.getBuffer();
}
490
491
492
493
494
495
/**
 * Whether the cache contains the block with specified cacheKey.
 *
 * @return true if it contains the block
 */
public boolean containsBlock(BlockCacheKey cacheKey) {
return map.containsKey(cacheKey);
}
499
500 @Override
501 public boolean evictBlock(BlockCacheKey cacheKey) {
502 LruCachedBlock cb = map.get(cacheKey);
503 if (cb == null) return false;
504 return evictBlock(cb, false) > 0;
505 }
506
507
508
509
510
511
512
513
514
515
516
517 @Override
518 public int evictBlocksByHfileName(String hfileName) {
519 int numEvicted = 0;
520 for (BlockCacheKey key : map.keySet()) {
521 if (key.getHfileName().equals(hfileName)) {
522 if (evictBlock(key))
523 ++numEvicted;
524 }
525 }
526 if (victimHandler != null) {
527 numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
528 }
529 return numEvicted;
530 }
531
532
533
534
535
536
537
538
539
/**
 * Evict the block, and it will be cached by the victim handler if exists &amp;&amp;
 * block may be read again later.
 *
 * @param block                    the block to evict
 * @param evictedByEvictionProcess true if the given block is evicted by the
 *                                 eviction process (as opposed to an explicit single-block evict)
 * @return the heap size of evicted block, or 0 if the block was no longer in the map
 */
protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
boolean found = map.remove(block.getCacheKey()) != null;
if (!found) {
return 0;
}
updateSizeMetrics(block, true);
long val = elements.decrementAndGet();
if (LOG.isTraceEnabled()) {
long size = map.size();
assertCounterSanity(size, val);
}
if (block.getBuffer().getBlockType().isData()) {
dataBlockElements.decrementAndGet();
}
stats.evicted(block.getCachedTime(), block.getCacheKey().isPrimary());
// Only hand the block off to the victim cache when evicted by the eviction process
// (an explicit evict means the block is being invalidated, not demoted).
if (evictedByEvictionProcess && victimHandler != null) {
if (victimHandler instanceof BucketCache) {
// Only wait for the bucket-cache write if we still have room to spare here.
boolean wait = getCurrentSize() < acceptableSize();
boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
((BucketCache)victimHandler).cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
inMemory, true, wait);
} else {
victimHandler.cacheBlock(block.getCacheKey(), block.getBuffer());
}
}
return block.heapSize();
}
567
568
569
570
571 private void runEviction() {
572 if(evictionThread == null) {
573 evict();
574 } else {
575 evictionThread.evict();
576 }
577 }
578
/** @return true while an eviction pass is running (package-private, used in tests). */
boolean isEvictionInProgress() {
return evictionInProgress;
}

/** @return the fixed structural overhead of this cache, in bytes. */
long getOverhead() {
return overhead;
}
586
587
588
589
/**
 * Eviction method. Frees enough space to bring the cache back down to its minimum
 * watermark ({@link #minSize()}), distributing the freeing across the single, multi
 * and memory priority buckets.
 */
void evict() {

// Ensure only one eviction at a time; if another thread holds the lock, skip this pass.
if(!evictionLock.tryLock()) return;

try {
evictionInProgress = true;
long currentSize = this.size.get();
long bytesToFree = currentSize - minSize();

if (LOG.isTraceEnabled()) {
LOG.trace("Block cache LRU eviction started; Attempting to free " +
StringUtils.byteDesc(bytesToFree) + " of total=" +
StringUtils.byteDesc(currentSize));
}

if(bytesToFree <= 0) return;

// Instantiate priority buckets; each bucket orders its blocks LRU-first.
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize,
singleSize());
BlockBucket bucketMulti = new BlockBucket("multi", bytesToFree, blockSize,
multiSize());
BlockBucket bucketMemory = new BlockBucket("memory", bytesToFree, blockSize,
memorySize());

// Scan entire cache and insert each cached block into its priority bucket.
for(LruCachedBlock cachedBlock : map.values()) {
switch(cachedBlock.getPriority()) {
case SINGLE: {
bucketSingle.add(cachedBlock);
break;
}
case MULTI: {
bucketMulti.add(cachedBlock);
break;
}
case MEMORY: {
bucketMemory.add(cachedBlock);
break;
}
}
}

long bytesFreed = 0;
if (forceInMemory || memoryFactor > 0.999f) {
// In-memory blocks take absolute priority: drain single and multi first,
// touching the memory bucket only if those two are not enough.
long s = bucketSingle.totalSize();
long m = bucketMulti.totalSize();
if (bytesToFree > (s + m)) {
// Free all from single and multi, then take the remainder from memory.
bytesFreed = bucketSingle.free(s);
bytesFreed += bucketMulti.free(m);
if (LOG.isTraceEnabled()) {
LOG.trace("freed " + StringUtils.byteDesc(bytesFreed) +
" from single and multi buckets");
}
bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
if (LOG.isTraceEnabled()) {
LOG.trace("freed " + StringUtils.byteDesc(bytesFreed) +
" total from all three buckets ");
}
} else {
// Single and multi alone can satisfy the request. Try to keep them at
// roughly a 1:2 (single:multi) ratio of the bytes that will remain.
long bytesRemain = s + m - bytesToFree;
if (3 * s <= bytesRemain) {
// Single is small enough already; take everything from multi.
bytesFreed = bucketMulti.free(bytesToFree);
} else if (3 * m <= 2 * bytesRemain) {
// Multi is small enough already; take everything from single.
bytesFreed = bucketSingle.free(bytesToFree);
} else {
// Shrink single toward 1/3 of the remainder, then top up from multi.
bytesFreed = bucketSingle.free(s - bytesRemain / 3);
if (bytesFreed < bytesToFree) {
bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
}
}
}
} else {
// Default policy: repeatedly take from the bucket that most exceeds its
// own share ("overflow"), splitting the remaining work evenly.
PriorityQueue<BlockBucket> bucketQueue =
new PriorityQueue<BlockBucket>(3);

bucketQueue.add(bucketSingle);
bucketQueue.add(bucketMulti);
bucketQueue.add(bucketMemory);

int remainingBuckets = bucketQueue.size();

BlockBucket bucket;
while((bucket = bucketQueue.poll()) != null) {
long overflow = bucket.overflow();
if(overflow > 0) {
long bucketBytesToFree = Math.min(overflow,
(bytesToFree - bytesFreed) / remainingBuckets);
bytesFreed += bucket.free(bucketBytesToFree);
}
remainingBuckets--;
}
}
if (LOG.isTraceEnabled()) {
long single = bucketSingle.totalSize();
long multi = bucketMulti.totalSize();
long memory = bucketMemory.totalSize();
LOG.trace("Block cache LRU eviction completed; " +
"freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
"total=" + StringUtils.byteDesc(this.size.get()) + ", " +
"single=" + StringUtils.byteDesc(single) + ", " +
"multi=" + StringUtils.byteDesc(multi) + ", " +
"memory=" + StringUtils.byteDesc(memory));
}
} finally {
stats.evict();
evictionInProgress = false;
evictionLock.unlock();
}
}
711
/** Human-readable summary of the cache's sizing state (uses Guava's toStringHelper). */
@Override
public String toString() {
return Objects.toStringHelper(this)
.add("blockCount", getBlockCount())
.add("currentSize", getCurrentSize())
.add("freeSize", getFreeSize())
.add("maxSize", getMaxSize())
.add("heapSize", heapSize())
.add("minSize", minSize())
.add("minFactor", minFactor)
.add("multiSize", multiSize())
.add("multiFactor", multiFactor)
.add("singleSize", singleSize())
.add("singleFactor", singleFactor)
.toString();
}
728
729
730
731
732
733
734
/**
 * Used to group blocks into priority buckets during eviction. There will be a BlockBucket
 * for each priority (single, multi, memory). Once bucketed, the eviction algorithm takes
 * the appropriate number of elements out of each according to configuration parameters and
 * their relative sizes. Buckets compare by their {@link #overflow()} (bytes above their
 * assigned share), so a priority queue pops the most-overflowing bucket first.
 */
private class BlockBucket implements Comparable<BlockBucket> {

private final String name;
// Min-heap of this bucket's blocks, bounded to roughly bytesToFree worth of candidates.
private LruCachedBlockQueue queue;
// Sum of heap sizes of all blocks added to this bucket.
private long totalSize = 0;
// The byte budget this bucket is entitled to (its share of maxSize).
private long bucketSize;

public BlockBucket(String name, long bytesToFree, long blockSize, long bucketSize) {
this.name = name;
this.bucketSize = bucketSize;
queue = new LruCachedBlockQueue(bytesToFree, blockSize);
totalSize = 0;
}

public void add(LruCachedBlock block) {
totalSize += block.heapSize();
queue.add(block);
}

/**
 * Evicts least-recently-used blocks from this bucket until roughly {@code toFree}
 * bytes have been freed (or the bucket's candidates are exhausted).
 *
 * @return the number of bytes actually freed
 */
public long free(long toFree) {
if (LOG.isTraceEnabled()) {
LOG.trace("freeing " + StringUtils.byteDesc(toFree) + " from " + this);
}
LruCachedBlock cb;
long freedBytes = 0;
while ((cb = queue.pollLast()) != null) {
freedBytes += evictBlock(cb, true);
if (freedBytes >= toFree) {
return freedBytes;
}
}
if (LOG.isTraceEnabled()) {
LOG.trace("freed " + StringUtils.byteDesc(freedBytes) + " from " + this);
}
return freedBytes;
}

/** @return bytes by which this bucket exceeds its assigned share (may be negative). */
public long overflow() {
return totalSize - bucketSize;
}

public long totalSize() {
return totalSize;
}

@Override
public int compareTo(BlockBucket that) {
return Long.compare(this.overflow(), that.overflow());
}

@Override
public boolean equals(Object that) {
if (that == null || !(that instanceof BlockBucket)){
return false;
}
// Consistent with compareTo: buckets with equal overflow are "equal".
return compareTo((BlockBucket)that) == 0;
}

@Override
public int hashCode() {
return Objects.hashCode(name, bucketSize, queue, totalSize);
}

@Override
public String toString() {
return Objects.toStringHelper(this)
.add("name", name)
.add("totalSize", StringUtils.byteDesc(totalSize))
.add("bucketSize", StringUtils.byteDesc(bucketSize))
.toString();
}
}
807
808
809
810
811
812
/**
 * Get the maximum size of this cache.
 *
 * @return max size in bytes
 */
@Override
public long getMaxSize() {
return this.maxSize;
}

/** @return current total size in bytes (includes fixed overhead). */
@Override
public long getCurrentSize() {
return this.size.get();
}

/** @return current size of cached data blocks, in bytes. */
@Override
public long getCurrentDataSize() {
return this.dataBlockSize.get();
}

/** @return remaining capacity in bytes (max - current). */
@Override
public long getFreeSize() {
return getMaxSize() - getCurrentSize();
}

/** @return the configured capacity of the cache, in bytes. */
@Override
public long size() {
return getMaxSize();
}

/** @return number of blocks currently cached. */
@Override
public long getBlockCount() {
return this.elements.get();
}

/** @return number of data blocks currently cached. */
@Override
public long getDataBlockCount() {
return this.dataBlockElements.get();
}

/** @return the eviction thread, or null if evictions run inline (package-private, tests). */
EvictionThread getEvictionThread() {
return this.evictionThread;
}
851
852
853
854
855
856
857
858 static class EvictionThread extends HasThread {
859 private WeakReference<LruBlockCache> cache;
860 private volatile boolean go = true;
861
862 private boolean enteringRun = false;
863
864 public EvictionThread(LruBlockCache cache) {
865 super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
866 setDaemon(true);
867 this.cache = new WeakReference<LruBlockCache>(cache);
868 }
869
870 @Override
871 public void run() {
872 enteringRun = true;
873 while (this.go) {
874 synchronized(this) {
875 try {
876 this.wait(1000 * 10
877 } catch(InterruptedException e) {
878 LOG.warn("Interrupted eviction thread ", e);
879 Thread.currentThread().interrupt();
880 }
881 }
882 LruBlockCache cache = this.cache.get();
883 if (cache == null) break;
884 cache.evict();
885 }
886 }
887
888 @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
889 justification="This is what we want")
890 public void evict() {
891 synchronized(this) {
892 this.notifyAll();
893 }
894 }
895
896 synchronized void shutdown() {
897 this.go = false;
898 this.notifyAll();
899 }
900
901
902
903
904 boolean isEnteringRun() {
905 return this.enteringRun;
906 }
907 }
908
909
910
911
/*
 * Statistics thread. Periodically scheduled (see scheduleThreadPool) to dump the
 * cache's statistics to the log.
 */
static class StatisticsThread extends Thread {
private final LruBlockCache lru;

public StatisticsThread(LruBlockCache lru) {
super("LruBlockCacheStats");
setDaemon(true);
this.lru = lru;
}

@Override
public void run() {
lru.logStats();
}
}
926
927 public void logStats() {
928
929 long totalSize = heapSize();
930 long freeSize = maxSize - totalSize;
931 LruBlockCache.LOG.info("totalSize=" + StringUtils.byteDesc(totalSize) + ", " +
932 "freeSize=" + StringUtils.byteDesc(freeSize) + ", " +
933 "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
934 "blockCount=" + getBlockCount() + ", " +
935 "accesses=" + stats.getRequestCount() + ", " +
936 "hits=" + stats.getHitCount() + ", " +
937 "hitRatio=" + (stats.getHitCount() == 0 ?
938 "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " +
939 "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
940 "cachingHits=" + stats.getHitCachingCount() + ", " +
941 "cachingHitsRatio=" + (stats.getHitCachingCount() == 0 ?
942 "0,": (StringUtils.formatPercent(stats.getHitCachingRatio(), 2) + ", ")) +
943 "evictions=" + stats.getEvictionCount() + ", " +
944 "evicted=" + stats.getEvictedCount() + ", " +
945 "evictedPerRun=" + stats.evictedPerEviction());
946 }
947
948
949
950
951
952
953
/**
 * Get counter statistics for this cache.
 *
 * <p>Includes: total accesses, hits, misses, evicted blocks, and runs of the eviction
 * processes.
 */
@Override
public CacheStats getStats() {
return this.stats;
}

// Fixed overhead of this class's own fields: 4 longs, 11 references, 6 floats,
// 2 booleans, plus the object header. Must be kept in sync with the field list above.
public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
(4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) +
(6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
+ ClassSize.OBJECT);
963
/** @return heap occupied by this cache — identical to {@link #getCurrentSize()}. */
@Override
public long heapSize() {
return getCurrentSize();
}
968
969 public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
970
971 return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
972 ((long)Math.ceil(maxSize*1.2/blockSize)
973 * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
974 ((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
975 }
976
/**
 * Returns a read-only iterator over the cached blocks, each wrapped as a
 * {@link CachedBlock} view snapshot. {@code remove()} is not supported.
 */
@Override
public Iterator<CachedBlock> iterator() {
final Iterator<LruCachedBlock> iterator = map.values().iterator();

return new Iterator<CachedBlock>() {
// Snapshot of "now" shared by all elements, used for age rendering in toString.
private final long now = System.nanoTime();

@Override
public boolean hasNext() {
return iterator.hasNext();
}

@Override
public CachedBlock next() {
final LruCachedBlock b = iterator.next();
// Adapter exposing the internal LruCachedBlock through the public CachedBlock view.
return new CachedBlock() {
@Override
public String toString() {
return BlockCacheUtil.toString(this, now);
}

@Override
public BlockPriority getBlockPriority() {
return b.getPriority();
}

@Override
public BlockType getBlockType() {
return b.getBuffer().getBlockType();
}

@Override
public long getOffset() {
return b.getCacheKey().getOffset();
}

@Override
public long getSize() {
return b.getBuffer().heapSize();
}

@Override
public long getCachedTime() {
return b.getCachedTime();
}

@Override
public String getFilename() {
return b.getCacheKey().getHfileName();
}

// Order by filename, then offset, then most-recently-cached first.
@Override
public int compareTo(CachedBlock other) {
int diff = this.getFilename().compareTo(other.getFilename());
if (diff != 0) return diff;
diff = Long.compare(this.getOffset(), other.getOffset());
if (diff != 0) return diff;
if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
throw new IllegalStateException("" + this.getCachedTime() + ", " +
other.getCachedTime());
}
return Long.compare(other.getCachedTime(), this.getCachedTime());
}

@Override
public int hashCode() {
return b.hashCode();
}

@Override
public boolean equals(Object obj) {
if (obj instanceof CachedBlock) {
CachedBlock cb = (CachedBlock)obj;
return compareTo(cb) == 0;
} else {
return false;
}
}
};
}

@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
1064
1065
1066
// HeapSize / watermark helpers: byte thresholds derived from maxSize and the factors.

/** Size above which an eviction pass is triggered. */
long acceptableSize() {
return (long)Math.floor(this.maxSize * this.acceptableFactor);
}

/** Size that an eviction pass drives the cache down to. */
private long minSize() {
return (long)Math.floor(this.maxSize * this.minFactor);
}

/** Byte share assigned to the single-access bucket. */
private long singleSize() {
return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
}

/** Byte share assigned to the multi-access bucket. */
private long multiSize() {
return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
}

/** Byte share assigned to the in-memory bucket. */
private long memorySize() {
return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
}
1086
1087 @Override
1088 public void shutdown() {
1089 if (victimHandler != null)
1090 victimHandler.shutdown();
1091 this.scheduleThreadPool.shutdown();
1092 for (int i = 0; i < 10; i++) {
1093 if (!this.scheduleThreadPool.isShutdown()) {
1094 try {
1095 Thread.sleep(10);
1096 } catch (InterruptedException e) {
1097 LOG.warn("Interrupted while sleeping");
1098 Thread.currentThread().interrupt();
1099 break;
1100 }
1101 }
1102 }
1103
1104 if (!this.scheduleThreadPool.isShutdown()) {
1105 List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
1106 LOG.debug("Still running " + runnables);
1107 }
1108 this.evictionThread.shutdown();
1109 }
1110
1111
/** Clears the cache map and resets the element count. */
// NOTE(review): size/dataBlockSize/dataBlockElements are NOT reset here, so the byte
// accounting no longer matches the (now empty) map afterwards — presumably acceptable
// for the test/maintenance contexts this is used in; verify against callers.
public void clearCache() {
this.map.clear();
this.elements.set(0);
}
1116
1117
1118
1119
1120
1121 SortedSet<String> getCachedFileNamesForTest() {
1122 SortedSet<String> fileNames = new TreeSet<String>();
1123 for (BlockCacheKey cacheKey : map.keySet()) {
1124 fileNames.add(cacheKey.getHfileName());
1125 }
1126 return fileNames;
1127 }
1128
1129 Map<BlockType, Integer> getBlockTypeCountsForTest() {
1130 Map<BlockType, Integer> counts =
1131 new EnumMap<BlockType, Integer>(BlockType.class);
1132 for (LruCachedBlock cb : map.values()) {
1133 BlockType blockType = ((Cacheable)cb.getBuffer()).getBlockType();
1134 Integer count = counts.get(blockType);
1135 counts.put(blockType, (count == null ? 0 : count) + 1);
1136 }
1137 return counts;
1138 }
1139
1140 public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
1141 Map<DataBlockEncoding, Integer> counts =
1142 new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
1143 for (LruCachedBlock block : map.values()) {
1144 DataBlockEncoding encoding =
1145 ((HFileBlock) block.getBuffer()).getDataBlockEncoding();
1146 Integer count = counts.get(encoding);
1147 counts.put(encoding, (count == null ? 0 : count) + 1);
1148 }
1149 return counts;
1150 }
1151
1152 public void setVictimCache(BlockCache handler) {
1153 assert victimHandler == null;
1154 victimHandler = handler;
1155 }
1156
/** @return the backing map — exposed for tests only; do not mutate in production code. */
Map<BlockCacheKey, LruCachedBlock> getMapForTests() {
return map;
}

/** @return the configured victim cache, or null if none was set. */
BlockCache getVictimHandler() {
return this.victimHandler;
}
1164
1165 @Override
1166 public BlockCache[] getBlockCaches() {
1167 if (victimHandler != null) {
1168 return new BlockCache[]{this, this.victimHandler};
1169 }
1170 return null;
1171 }
1172 }