/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */

package org.apache.hadoop.hbase.io.hfile;

import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.util.StringUtils;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * A block cache implementation that is memory-aware (via {@link HeapSize}), size-bounded using an
 * LRU eviction algorithm, and concurrent, backed by a {@link ConcurrentHashMap}.
 *
 * <p>The cache is instantiated with a maximum size and an expected block size; the block size is
 * only used to pre-size the backing map and to estimate fixed overhead. Blocks are kept at one of
 * three priorities so that scans do not wipe out the working set:
 * <ul>
 *   <li>Single access: a block newly loaded from the filesystem starts here and is the first
 *       candidate for eviction.</li>
 *   <li>Multi access: a block that is hit again while cached is promoted here.</li>
 *   <li>In-memory: blocks from column families marked in-memory; when
 *       {@code hbase.lru.rs.inmemoryforcemode} is enabled (or the memory factor is close to 1.0),
 *       single- and multi-access blocks are always evicted before these.</li>
 * </ul>
 *
 * <p>Eviction normally runs in a dedicated thread: once the cache grows past the acceptable
 * factor of its maximum size, blocks are freed until the size drops back below the minimum
 * factor. If a {@link BucketCache} is configured as a victim handler, blocks evicted by that
 * process are handed off to it instead of being dropped.
 */
@InterfaceAudience.Private
@JsonIgnoreProperties({"encodingCountsForTest"})
public class LruBlockCache implements ResizableBlockCache, HeapSize {

  static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  /** Percentage of the maximum size that eviction frees down to. */
  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";

  /** Percentage of the maximum size above which eviction is triggered. */
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";

  /** Shares of the cache reserved for single-access, multi-access and in-memory blocks. */
  static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
  static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
  static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";

  /**
   * Cluster-wide switch that makes eviction favor keeping in-memory blocks: when enabled,
   * single- and multi-access blocks are evicted before any in-memory block is considered.
   */
  static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";

  /** Backing ConcurrentHashMap tuning. */
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;

  /** Eviction thresholds: evict when above acceptable, free down to min. */
  static final float DEFAULT_MIN_FACTOR = 0.95f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;

  /** Default priority bucket shares. */
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;

  /** Statistics thread period, in seconds. */
  static final int statThreadPeriod = 60 * 5;

  /** Concurrent map (the cache). */
  private final Map<BlockCacheKey, LruCachedBlock> map;

  /** Eviction lock (locked when eviction is in process). */
  private final ReentrantLock evictionLock = new ReentrantLock(true);

  /** Volatile boolean to track if an eviction is in progress. */
  private volatile boolean evictionInProgress = false;

  /** Eviction thread. */
  private final EvictionThread evictionThread;

  /** Scheduler for the periodic statistics logger. */
  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,
      new ThreadFactoryBuilder().setNameFormat("LruBlockCacheStatsExecutor").setDaemon(true).build());

  /** Current size of cache, in bytes. */
  private final AtomicLong size;

  /** Current number of cached elements. */
  private final AtomicLong elements;

  /** Cache access count (sequential ID used as the LRU clock). */
  private final AtomicLong count;

  /** Cache statistics. */
  private final CacheStats stats;

  /** Maximum allowable size of the cache, in bytes. */
  private long maxSize;

  /** Approximate block size. */
  private long blockSize;

  /** Acceptable size of cache (no evictions while size < acceptable). */
  private float acceptableFactor;

  /** Minimum threshold of cache (when evicting, evict until size < min). */
  private float minFactor;

  /** Single-access bucket share. */
  private float singleFactor;

  /** Multiple-access bucket share. */
  private float multiFactor;

  /** In-memory bucket share. */
  private float memoryFactor;

  /** Fixed overhead of the cache structures themselves, in bytes. */
  private long overhead;

  /** Whether in-memory blocks are always favored over single/multi blocks during eviction. */
  private boolean forceInMemory;

  /** Where to send victims (blocks evicted from the cache), if anywhere. */
  private BucketCache victimHandler = null;

  /**
   * Default constructor. Specify maximum size and expected average block size (approximation is
   * fine). All other factors are defaults and the eviction thread is started.
   *
   * @param maxSize   maximum size of cache, in bytes
   * @param blockSize approximate size of each block, in bytes
   */
  public LruBlockCache(long maxSize, long blockSize) {
    this(maxSize, blockSize, true);
  }

  /**
   * Constructor used for testing. Allows disabling of the eviction thread.
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
        DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
        DEFAULT_SINGLE_FACTOR,
        DEFAULT_MULTI_FACTOR,
        DEFAULT_MEMORY_FACTOR,
        false
        );
  }

  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
        conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
        conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
        conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE)
        );
  }

  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }
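
  // Illustrative usage sketch (not part of the original source; HBaseConfiguration and the sizes
  // below are assumptions for the example only): an on-heap cache of roughly 1 GB with 64 KB
  // blocks, configured from the cluster configuration and using the eviction thread.
  //
  //   Configuration conf = HBaseConfiguration.create();
  //   LruBlockCache cache = new LruBlockCache(1024L * 1024L * 1024L, 64 * 1024, conf);
  //   cache.cacheBlock(key, block);                            // cached at single-access priority
  //   Cacheable hit = cache.getBlock(key, true, false, true);  // a hit promotes it to multi-access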

  /**
   * Configurable constructor. Use this constructor if not using defaults.
   *
   * @param maxSize             maximum size of this cache, in bytes
   * @param blockSize           expected average size of blocks, in bytes
   * @param evictionThread      whether to run evictions in a background thread
   * @param mapInitialSize      initial size of the backing map
   * @param mapLoadFactor       load factor of the backing map
   * @param mapConcurrencyLevel concurrency level of the backing map
   * @param minFactor           percentage of total size that eviction frees down to
   * @param acceptableFactor    percentage of total size that triggers eviction
   * @param singleFactor        share of the cache for single-access blocks
   * @param multiFactor         share of the cache for multiple-access blocks
   * @param memoryFactor        share of the cache for in-memory blocks
   * @param forceInMemory       whether in-memory blocks are always favored during eviction
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
      int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
      float minFactor, float acceptableFactor, float singleFactor,
      float multiFactor, float memoryFactor, boolean forceInMemory) {
    if (singleFactor + multiFactor + memoryFactor != 1 ||
        singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
      throw new IllegalArgumentException("Single, multi, and memory factors " +
          "should be non-negative and total 1.0");
    }
    if (minFactor >= acceptableFactor) {
      throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
    }
    if (minFactor >= 1.0f || acceptableFactor >= 1.0f) {
      throw new IllegalArgumentException("all factors must be < 1");
    }
    this.maxSize = maxSize;
    this.blockSize = blockSize;
    this.forceInMemory = forceInMemory;
    map = new ConcurrentHashMap<BlockCacheKey, LruCachedBlock>(mapInitialSize,
        mapLoadFactor, mapConcurrencyLevel);
    this.minFactor = minFactor;
    this.acceptableFactor = acceptableFactor;
    this.singleFactor = singleFactor;
    this.multiFactor = multiFactor;
    this.memoryFactor = memoryFactor;
    this.stats = new CacheStats(this.getClass().getSimpleName());
    this.count = new AtomicLong(0);
    this.elements = new AtomicLong(0);
    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
    this.size = new AtomicLong(this.overhead);
    if (evictionThread) {
      this.evictionThread = new EvictionThread(this);
      this.evictionThread.start();
    } else {
      this.evictionThread = null;
    }
    // Periodically log cache statistics.
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }

  @Override
  public void setMaxSize(long maxSize) {
    this.maxSize = maxSize;
    if (this.size.get() > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }

  /**
   * Cache the block with the specified name and buffer.
   * <p>
   * It is assumed this will NOT be called on an already cached block. In the rare event that it
   * is (see HBASE-8547), the call is tolerated if the new content is identical to what is already
   * cached, and rejected otherwise.
   *
   * @param cacheKey       block's cache key
   * @param buf            block buffer
   * @param inMemory       if block is in-memory
   * @param cacheDataInL1  not used by this implementation
   */
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory,
      final boolean cacheDataInL1) {
    LruCachedBlock cb = map.get(cacheKey);
    if (cb != null) {
      // Compare the contents; if they are not equal, we are in trouble.
      if (compare(buf, cb.getBuffer()) != 0) {
        throw new RuntimeException("Cached block contents differ, which should not have happened. "
            + "cacheKey:" + cacheKey);
      }
      String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
      msg += ". This is harmless and can happen in rare cases (see HBASE-8547)";
      LOG.warn(msg);
      return;
    }
    cb = new LruCachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
    long newSize = updateSizeMetrics(cb, false);
    map.put(cacheKey, cb);
    long val = elements.incrementAndGet();
    if (LOG.isTraceEnabled()) {
      long size = map.size();
      assertCounterSanity(size, val);
    }
    if (newSize > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }

  /**
   * Trace-level sanity check: warns when the element counter and the actual map size drift apart
   * by more than 5%, or when the counter has overflowed.
   */
  private static void assertCounterSanity(long mapSize, long counterVal) {
    if (counterVal < 0) {
      LOG.trace("counterVal overflow. Assertions unreliable. counterVal=" + counterVal +
          ", mapSize=" + mapSize);
      return;
    }
    if (mapSize < Integer.MAX_VALUE) {
      double pct_diff = Math.abs((((double) counterVal) / ((double) mapSize)) - 1.);
      if (pct_diff > 0.05) {
        LOG.trace("delta between reported and actual size > 5%. counterVal=" + counterVal +
            ", mapSize=" + mapSize);
      }
    }
  }

  /** Compares two cacheables by their serialized contents. */
  private int compare(Cacheable left, Cacheable right) {
    ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
    left.serialize(l);
    ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
    right.serialize(r);
    return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
        r.array(), r.arrayOffset(), r.limit());
  }

  /**
   * Cache the block with the specified name and buffer, using single-access priority.
   *
   * @param cacheKey block's cache key
   * @param buf      block buffer
   */
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
    cacheBlock(cacheKey, buf, false, false);
  }

  /**
   * Adjusts the cache size counter by the heap size of the given block, subtracting it when the
   * block is being evicted.
   *
   * @return the new size of the cache, in bytes
   */
  protected long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
    long heapsize = cb.heapSize();
    if (evict) {
      heapsize *= -1;
    }
    return size.addAndGet(heapsize);
  }

  /**
   * Get the buffer of the block with the specified name.
   *
   * @param cacheKey           block's cache key
   * @param caching            true if the caller caches blocks on cache misses
   * @param repeat             whether this is a repeat lookup for the same block (used to avoid
   *                           double-counting misses)
   * @param updateCacheMetrics whether to update cache metrics or not
   * @return buffer of the specified cache key, or null if not in cache
   */
  @Override
  public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat,
      boolean updateCacheMetrics) {
    LruCachedBlock cb = map.get(cacheKey);
    if (cb == null) {
      if (!repeat && updateCacheMetrics) stats.miss(caching);
      // If there is a victim cache, try to read from it.
      if (victimHandler != null) {
        return victimHandler.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
      }
      return null;
    }
    if (updateCacheMetrics) stats.hit(caching);
    cb.access(count.incrementAndGet());
    return cb.getBuffer();
  }

  /**
   * Whether the cache contains the block with the specified cacheKey.
   *
   * @return true if it contains the block
   */
  public boolean containsBlock(BlockCacheKey cacheKey) {
    return map.containsKey(cacheKey);
  }

  @Override
  public boolean evictBlock(BlockCacheKey cacheKey) {
    LruCachedBlock cb = map.get(cacheKey);
    if (cb == null) return false;
    evictBlock(cb, false);
    return true;
  }

  /**
   * Evicts all blocks for a specific HFile. This is an expensive operation implemented as a
   * linear-time search through all blocks in the cache.
   *
   * <p>Used for evict-on-close to remove all blocks of a specific HFile.
   *
   * @return the number of blocks evicted
   */
  @Override
  public int evictBlocksByHfileName(String hfileName) {
    int numEvicted = 0;
    for (BlockCacheKey key : map.keySet()) {
      if (key.getHfileName().equals(hfileName)) {
        if (evictBlock(key))
          ++numEvicted;
      }
    }
    if (victimHandler != null) {
      numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
    }
    return numEvicted;
  }
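
  // Illustrative call site (an assumption, not taken from the original source): evict-on-close
  // would typically pass the file-name portion of a store file path, where storeFilePath is a
  // hypothetical org.apache.hadoop.fs.Path.
  //
  //   int evicted = cache.evictBlocksByHfileName(storeFilePath.getName());
  //   LOG.debug("Evicted " + evicted + " blocks for " + storeFilePath.getName());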

  /**
   * Removes the block from the cache and updates size and eviction metrics. If the eviction
   * process triggered the removal and a victim handler is configured, the block is handed off to
   * the victim cache.
   *
   * @param block                    the block to evict
   * @param evictedByEvictionProcess true if the block is being evicted by the eviction process
   * @return the heap size of the evicted block
   */
  protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
    map.remove(block.getCacheKey());
    updateSizeMetrics(block, true);
    long val = elements.decrementAndGet();
    if (LOG.isTraceEnabled()) {
      long size = map.size();
      assertCounterSanity(size, val);
    }
    stats.evicted(block.getCachedTime());
    if (evictedByEvictionProcess && victimHandler != null) {
      boolean wait = getCurrentSize() < acceptableSize();
      boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
      victimHandler.cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
          inMemory, wait);
    }
    return block.heapSize();
  }

  /**
   * Triggers the eviction process, either inline or via the eviction thread.
   */
  private void runEviction() {
    if (evictionThread == null) {
      evict();
    } else {
      evictionThread.evict();
    }
  }

  /**
   * Eviction method. Frees blocks until the cache size is back under {@link #minSize()}.
   */
  void evict() {

    // Ensure only one eviction runs at a time.
    if (!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      long bytesToFree = currentSize - minSize();

      if (LOG.isTraceEnabled()) {
        LOG.trace("Block cache LRU eviction started; Attempting to free " +
            StringUtils.byteDesc(bytesToFree) + " of total=" +
            StringUtils.byteDesc(currentSize));
      }

      if (bytesToFree <= 0) return;

      // Instantiate priority buckets.
      BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize,
          singleSize());
      BlockBucket bucketMulti = new BlockBucket("multi", bytesToFree, blockSize,
          multiSize());
      BlockBucket bucketMemory = new BlockBucket("memory", bytesToFree, blockSize,
          memorySize());

      // Scan the entire map, putting each block into its priority bucket.
      for (LruCachedBlock cachedBlock : map.values()) {
        switch (cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      long bytesFreed = 0;
      if (forceInMemory || memoryFactor > 0.999f) {
        long s = bucketSingle.totalSize();
        long m = bucketMulti.totalSize();
        if (bytesToFree > (s + m)) {
          // Freeing all single and multi blocks is still not enough, so the memory
          // bucket has to give up blocks as well; empty single and multi first.
          bytesFreed = bucketSingle.free(s);
          bytesFreed += bucketMulti.free(m);
          if (LOG.isTraceEnabled()) {
            LOG.trace("freed " + StringUtils.byteDesc(bytesFreed) +
                " from single and multi buckets");
          }
          bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
          if (LOG.isTraceEnabled()) {
            LOG.trace("freed " + StringUtils.byteDesc(bytesFreed) +
                " total from all three buckets ");
          }
        } else {
          // No need to evict from the memory bucket; try to keep the remaining
          // single and multi blocks at roughly a 1:2 ratio.
          long bytesRemain = s + m - bytesToFree;
          if (3 * s <= bytesRemain) {
            // The single bucket is small enough that it needs no eviction;
            // everything comes out of the multi bucket.
            bytesFreed = bucketMulti.free(bytesToFree);
          } else if (3 * m <= 2 * bytesRemain) {
            // The multi bucket is small enough that it needs no eviction;
            // everything comes out of the single bucket.
            bytesFreed = bucketSingle.free(bytesToFree);
          } else {
            // Both buckets need to give up some blocks.
            bytesFreed = bucketSingle.free(s - bytesRemain / 3);
            if (bytesFreed < bytesToFree) {
              bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
            }
          }
        }
      } else {
        PriorityQueue<BlockBucket> bucketQueue =
            new PriorityQueue<BlockBucket>(3);

        bucketQueue.add(bucketSingle);
        bucketQueue.add(bucketMulti);
        bucketQueue.add(bucketMemory);

        int remainingBuckets = 3;

        // Walk the buckets from least to most overflowed; each gives up at most its own
        // overflow and at most an even split of the bytes still to be freed, so the most
        // overflowed bucket absorbs the remainder.
        BlockBucket bucket;
        while ((bucket = bucketQueue.poll()) != null) {
          long overflow = bucket.overflow();
          if (overflow > 0) {
            long bucketBytesToFree = Math.min(overflow,
                (bytesToFree - bytesFreed) / remainingBuckets);
            bytesFreed += bucket.free(bucketBytesToFree);
          }
          remainingBuckets--;
        }
      }

      if (LOG.isTraceEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.trace("Block cache LRU eviction completed; " +
            "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
            "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
            "single=" + StringUtils.byteDesc(single) + ", " +
            "multi=" + StringUtils.byteDesc(multi) + ", " +
            "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
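
  // Worked example with the defaults (illustrative numbers only): for maxSize = 1 GB,
  // acceptableFactor = 0.99 and minFactor = 0.95, eviction kicks in once usage passes ~0.99 GB
  // and frees roughly 40 MB, bringing usage back under ~0.95 GB. With the default
  // 0.25/0.50/0.25 split, the single, multi and memory buckets only count as over quota beyond
  // ~243 MB, ~486 MB and ~243 MB respectively (each share is scaled by minFactor).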

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
      .add("blockCount", getBlockCount())
      .add("currentSize", getCurrentSize())
      .add("freeSize", getFreeSize())
      .add("maxSize", getMaxSize())
      .add("heapSize", heapSize())
      .add("minSize", minSize())
      .add("minFactor", minFactor)
      .add("multiSize", multiSize())
      .add("multiFactor", multiFactor)
      .add("singleSize", singleSize())
      .add("singleFactor", singleFactor)
      .toString();
  }

  /**
   * Used to group blocks into priority buckets. There is one BlockBucket per priority
   * (single, multi, memory). Once bucketed, the eviction algorithm takes the appropriate number
   * of elements out of each according to the configured shares and their relative sizes.
   */
  private class BlockBucket implements Comparable<BlockBucket> {

    private final String name;
    private LruCachedBlockQueue queue;
    private long totalSize = 0;
    private long bucketSize;

    public BlockBucket(String name, long bytesToFree, long blockSize, long bucketSize) {
      this.name = name;
      this.bucketSize = bucketSize;
      queue = new LruCachedBlockQueue(bytesToFree, blockSize);
      totalSize = 0;
    }

    public void add(LruCachedBlock block) {
      totalSize += block.heapSize();
      queue.add(block);
    }

    public long free(long toFree) {
      if (LOG.isTraceEnabled()) {
        LOG.trace("freeing " + StringUtils.byteDesc(toFree) + " from " + this);
      }
      LruCachedBlock cb;
      long freedBytes = 0;
      while ((cb = queue.pollLast()) != null) {
        freedBytes += evictBlock(cb, true);
        if (freedBytes >= toFree) {
          return freedBytes;
        }
      }
      if (LOG.isTraceEnabled()) {
        LOG.trace("freed " + StringUtils.byteDesc(freedBytes) + " from " + this);
      }
      return freedBytes;
    }

    public long overflow() {
      return totalSize - bucketSize;
    }

    public long totalSize() {
      return totalSize;
    }

    @Override
    public int compareTo(BlockBucket that) {
      if (this.overflow() == that.overflow()) return 0;
      return this.overflow() > that.overflow() ? 1 : -1;
    }

    @Override
    public boolean equals(Object that) {
      if (that == null || !(that instanceof BlockBucket)) {
        return false;
      }
      return compareTo((BlockBucket) that) == 0;
    }

    @Override
    public int hashCode() {
      return Objects.hashCode(name, bucketSize, queue, totalSize);
    }

    @Override
    public String toString() {
      return Objects.toStringHelper(this)
        .add("name", name)
        .add("totalSize", StringUtils.byteDesc(totalSize))
        .add("bucketSize", StringUtils.byteDesc(bucketSize))
        .toString();
    }
  }

  /**
   * Get the maximum size of this cache.
   *
   * @return max size in bytes
   */
  public long getMaxSize() {
    return this.maxSize;
  }

  @Override
  public long getCurrentSize() {
    return this.size.get();
  }

  @Override
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  @Override
  public long size() {
    return getMaxSize();
  }

  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }

  /**
   * Eviction thread. Sits in a waiting state until an eviction is triggered when the cache size
   * grows above the acceptable level.
   *
   * <p>The thread is triggered into action by {@link LruBlockCache#runEviction()}.
   */
  static class EvictionThread extends HasThread {
    private WeakReference<LruBlockCache> cache;
    private volatile boolean go = true;
    // Set once run() has been entered; used by tests.
    private boolean enteringRun = false;

    public EvictionThread(LruBlockCache cache) {
      super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
      setDaemon(true);
      this.cache = new WeakReference<LruBlockCache>(cache);
    }

    @Override
    public void run() {
      enteringRun = true;
      while (this.go) {
        synchronized (this) {
          try {
            this.wait(1000 * 10); // wake periodically even without a notify
          } catch (InterruptedException e) {
            // fall through and re-check the go flag
          }
        }
        LruBlockCache cache = this.cache.get();
        if (cache == null) break;
        cache.evict();
      }
    }

    @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
        justification="This is what we want")
    public void evict() {
      synchronized (this) {
        this.notifyAll();
      }
    }

    synchronized void shutdown() {
      this.go = false;
      this.notifyAll();
    }

    /**
     * Used by tests.
     */
    boolean isEnteringRun() {
      return this.enteringRun;
    }
  }

  /**
   * Statistics thread. Periodically prints the cache statistics to the log.
   */
  static class StatisticsThread extends Thread {
    private final LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCacheStats");
      setDaemon(true);
      this.lru = lru;
    }

    @Override
    public void run() {
      lru.logStats();
    }
  }

  public void logStats() {
    // Log sizes and hit/eviction counters.
    long totalSize = heapSize();
    long freeSize = maxSize - totalSize;
    LruBlockCache.LOG.info("totalSize=" + StringUtils.byteDesc(totalSize) + ", " +
        "freeSize=" + StringUtils.byteDesc(freeSize) + ", " +
        "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
        "blockCount=" + getBlockCount() + ", " +
        "accesses=" + stats.getRequestCount() + ", " +
        "hits=" + stats.getHitCount() + ", " +
        "hitRatio=" + (stats.getHitCount() == 0 ?
          "0" : StringUtils.formatPercent(stats.getHitRatio(), 2)) + ", " +
        "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
        "cachingHits=" + stats.getHitCachingCount() + ", " +
        "cachingHitsRatio=" + (stats.getHitCachingCount() == 0 ?
          "0" : StringUtils.formatPercent(stats.getHitCachingRatio(), 2)) + ", " +
        "evictions=" + stats.getEvictionCount() + ", " +
        "evicted=" + stats.getEvictedCount() + ", " +
        "evictedPerRun=" + stats.evictedPerEviction());
  }

  /**
   * Get counter statistics for this cache (accesses, hits, misses, evicted blocks, and eviction
   * runs).
   */
  public CacheStats getStats() {
    return this.stats;
  }

  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (9 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
      + ClassSize.OBJECT);

  @Override
  public long heapSize() {
    return getCurrentSize();
  }

  public static long calculateOverhead(long maxSize, long blockSize, int concurrency) {
    // Fixed overhead plus the estimated cost of the pre-sized ConcurrentHashMap and its segments.
    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
        ((long) Math.ceil(maxSize * 1.2 / blockSize)
            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        ((long) concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
  }
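
  // Back-of-the-envelope example (illustrative numbers, not from the original source): with
  // maxSize = 1 GB, blockSize = 64 KB and the default concurrency level of 16, the map is
  // pre-sized for ceil(1.2 * 2^30 / 65536) = 19,661 entries, so the reported overhead is
  // CACHE_FIXED_OVERHEAD + CONCURRENT_HASHMAP + 19,661 * CONCURRENT_HASHMAP_ENTRY
  // + 16 * CONCURRENT_HASHMAP_SEGMENT bytes, and that value seeds the size counter.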

  @Override
  public Iterator<CachedBlock> iterator() {
    final Iterator<LruCachedBlock> iterator = map.values().iterator();

    return new Iterator<CachedBlock>() {
      private final long now = System.nanoTime();

      @Override
      public boolean hasNext() {
        return iterator.hasNext();
      }

      @Override
      public CachedBlock next() {
        final LruCachedBlock b = iterator.next();
        return new CachedBlock() {
          @Override
          public String toString() {
            return BlockCacheUtil.toString(this, now);
          }

          @Override
          public BlockPriority getBlockPriority() {
            return b.getPriority();
          }

          @Override
          public BlockType getBlockType() {
            return b.getBuffer().getBlockType();
          }

          @Override
          public long getOffset() {
            return b.getCacheKey().getOffset();
          }

          @Override
          public long getSize() {
            return b.getBuffer().heapSize();
          }

          @Override
          public long getCachedTime() {
            return b.getCachedTime();
          }

          @Override
          public String getFilename() {
            return b.getCacheKey().getHfileName();
          }

          @Override
          public int compareTo(CachedBlock other) {
            int diff = this.getFilename().compareTo(other.getFilename());
            if (diff != 0) return diff;
            diff = (int) (this.getOffset() - other.getOffset());
            if (diff != 0) return diff;
            if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
              throw new IllegalStateException("" + this.getCachedTime() + ", " +
                  other.getCachedTime());
            }
            return (int) (other.getCachedTime() - this.getCachedTime());
          }

          @Override
          public int hashCode() {
            return b.hashCode();
          }

          @Override
          public boolean equals(Object obj) {
            if (obj instanceof CachedBlock) {
              CachedBlock cb = (CachedBlock) obj;
              return compareTo(cb) == 0;
            } else {
              return false;
            }
          }
        };
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  // Simple calculators of sizes given factors and maxSize.

  long acceptableSize() {
    return (long) Math.floor(this.maxSize * this.acceptableFactor);
  }
  private long minSize() {
    return (long) Math.floor(this.maxSize * this.minFactor);
  }
  private long singleSize() {
    return (long) Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  private long multiSize() {
    return (long) Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  private long memorySize() {
    return (long) Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }

  public void shutdown() {
    if (victimHandler != null)
      victimHandler.shutdown();
    this.scheduleThreadPool.shutdown();
    for (int i = 0; i < 10; i++) {
      if (!this.scheduleThreadPool.isShutdown()) {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          LOG.warn("Interrupted while sleeping");
          Thread.currentThread().interrupt();
          break;
        }
      }
    }

    if (!this.scheduleThreadPool.isShutdown()) {
      List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
      LOG.debug("Still running " + runnables);
    }
    if (this.evictionThread != null) {
      this.evictionThread.shutdown();
    }
  }

  /** Clears the cache. Used in tests. */
  @VisibleForTesting
  public void clearCache() {
    this.map.clear();
    this.elements.set(0);
  }

  /**
   * Used in testing. May be very inefficient.
   *
   * @return the set of cached file names
   */
  @VisibleForTesting
  SortedSet<String> getCachedFileNamesForTest() {
    SortedSet<String> fileNames = new TreeSet<String>();
    for (BlockCacheKey cacheKey : map.keySet()) {
      fileNames.add(cacheKey.getHfileName());
    }
    return fileNames;
  }

  @VisibleForTesting
  Map<BlockType, Integer> getBlockTypeCountsForTest() {
    Map<BlockType, Integer> counts =
        new EnumMap<BlockType, Integer>(BlockType.class);
    for (LruCachedBlock cb : map.values()) {
      BlockType blockType = cb.getBuffer().getBlockType();
      Integer count = counts.get(blockType);
      counts.put(blockType, (count == null ? 0 : count) + 1);
    }
    return counts;
  }

  @VisibleForTesting
  public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
    Map<DataBlockEncoding, Integer> counts =
        new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
    for (LruCachedBlock block : map.values()) {
      DataBlockEncoding encoding =
          ((HFileBlock) block.getBuffer()).getDataBlockEncoding();
      Integer count = counts.get(encoding);
      counts.put(encoding, (count == null ? 0 : count) + 1);
    }
    return counts;
  }

  public void setVictimCache(BucketCache handler) {
    assert victimHandler == null;
    victimHandler = handler;
  }

  @VisibleForTesting
  Map<BlockCacheKey, LruCachedBlock> getMapForTests() {
    return map;
  }

  BucketCache getVictimHandler() {
    return this.victimHandler;
  }

  @Override
  public BlockCache[] getBlockCaches() {
    return null;
  }
}