/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
19 package org.apache.hadoop.hbase.io.hfile;
20
21 import java.lang.ref.WeakReference;
22 import java.nio.ByteBuffer;
23 import java.util.EnumMap;
24 import java.util.Iterator;
25 import java.util.List;
26 import java.util.Map;
27 import java.util.PriorityQueue;
28 import java.util.SortedSet;
29 import java.util.TreeSet;
30 import java.util.concurrent.ConcurrentHashMap;
31 import java.util.concurrent.Executors;
32 import java.util.concurrent.ScheduledExecutorService;
33 import java.util.concurrent.TimeUnit;
34 import java.util.concurrent.atomic.AtomicLong;
35 import java.util.concurrent.locks.ReentrantLock;
36
37 import org.apache.commons.logging.Log;
38 import org.apache.commons.logging.LogFactory;
39 import org.apache.hadoop.classification.InterfaceAudience;
40 import org.apache.hadoop.conf.Configuration;
41 import org.apache.hadoop.hbase.io.HeapSize;
42 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
43 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
44 import org.apache.hadoop.hbase.util.Bytes;
45 import org.apache.hadoop.hbase.util.ClassSize;
46 import org.apache.hadoop.hbase.util.HasThread;
47 import org.apache.hadoop.hbase.util.Threads;
48 import org.apache.hadoop.util.StringUtils;
49 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
50
51 import com.google.common.annotations.VisibleForTesting;
52 import com.google.common.util.concurrent.ThreadFactoryBuilder;
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
/**
 * LRU block cache backed by a {@link ConcurrentHashMap}, with three block
 * priorities (single-access, multi-access, in-memory) that are evicted
 * against separate size quotas.  Eviction runs when usage crosses the
 * "acceptable" watermark and shrinks the cache back to the "min" watermark;
 * see {@code evict()}.  An optional {@link BucketCache} acts as a victim
 * cache for evicted blocks.
 */
@InterfaceAudience.Private
@JsonIgnoreProperties({"encodingCountsForTest"})
public class LruBlockCache implements BlockCache, HeapSize {

  static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  // Configuration keys for the eviction tuning factors below.
  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";
  static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
  static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
  static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";

  /**
   * Configuration key for forcing in-memory blocks to stay strictly within
   * the in-memory bucket's quota (see how {@code forceInMemory} is used in
   * {@code evict()}).
   */
  static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";

  /** Defaults for the backing ConcurrentHashMap. */
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;

  /** Eviction watermarks as fractions of maxSize: once usage exceeds
   *  acceptable, evict down to min. */
  static final float DEFAULT_MIN_FACTOR = 0.95f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;

  /** Default share of the cache for each priority; the three must total 1.0. */
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;

  /** Statistics logging period, in seconds (5 minutes). */
  static final int statThreadPeriod = 60 * 5;

  /** Concurrent map (the cache). */
  private final Map<BlockCacheKey,LruCachedBlock> map;

  /** Eviction lock (fair; held for the duration of an eviction pass). */
  private final ReentrantLock evictionLock = new ReentrantLock(true);

  /** Volatile flag tracking whether an eviction pass is in progress. */
  private volatile boolean evictionInProgress = false;

  /** Eviction thread; null when the cache was built without one. */
  private final EvictionThread evictionThread;

  /** Daemon scheduler that runs the periodic StatisticsThread. */
  private final ScheduledExecutorService scheduleThreadPool =
      Executors.newScheduledThreadPool(1,
          new ThreadFactoryBuilder()
              .setNameFormat("LruStats #%d")
              .setDaemon(true)
              .build());

  /** Current size of the cache in bytes, including the fixed overhead. */
  private final AtomicLong size;

  /** Current number of cached elements. */
  private final AtomicLong elements;

  /** Monotonic access counter used to order blocks by recency of use. */
  private final AtomicLong count;

  /** Cache hit/miss/eviction statistics. */
  private final CacheStats stats;

  /** Maximum allowable size of the cache, in bytes. */
  private long maxSize;

  /** Approximate block size, used to pre-size the map and queues. */
  private long blockSize;

  /** Fraction of maxSize above which an eviction pass is triggered. */
  private float acceptableFactor;

  /** Fraction of maxSize an eviction pass shrinks the cache down to. */
  private float minFactor;

  /** Share of the cache allotted to single-access blocks. */
  private float singleFactor;

  /** Share of the cache allotted to multiple-access blocks. */
  private float multiFactor;

  /** Share of the cache allotted to in-memory blocks. */
  private float memoryFactor;

  /** Fixed bookkeeping overhead of this cache instance, in bytes. */
  private long overhead;

  /** Whether in-memory blocks are strictly confined to their quota. */
  private boolean forceInMemory;

  /** Optional second-level "victim" cache that receives evicted blocks. */
  private BucketCache victimHandler = null;
192
193
194
195
196
197
198
199
200
201
  /**
   * Default constructor.  Specify maximum size and expected average block
   * size (approximation is fine).  All other factors use the defaults
   * declared in this class, and the eviction thread is enabled.
   *
   * @param maxSize   maximum size of the cache, in bytes
   * @param blockSize approximate size of each block, in bytes
   */
  public LruBlockCache(long maxSize, long blockSize) {
    this(maxSize, blockSize, true);
  }

  /**
   * Constructor used mostly for testing: allows disabling of the eviction
   * thread, in which case evictions run inline on the caller.
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
        DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
        DEFAULT_SINGLE_FACTOR,
        DEFAULT_MULTI_FACTOR,
        DEFAULT_MEMORY_FACTOR,
        false
        );
  }

  /**
   * As above, but the tuning factors are read from the supplied
   * Configuration, falling back to the class defaults for absent keys.
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
        conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
        conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
        conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE)
        );
  }

  /** Eviction-thread-enabled constructor reading tuning factors from conf. */
  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
  /**
   * Configurable constructor.  Use this constructor if not using defaults.
   *
   * @param maxSize             maximum size of this cache, in bytes
   * @param blockSize           expected average size of blocks, in bytes
   * @param evictionThread      whether to run evictions on a background thread
   * @param mapInitialSize      initial size of the backing map
   * @param mapLoadFactor       load factor of the backing map
   * @param mapConcurrencyLevel concurrency level of the backing map
   * @param minFactor           fraction to evict down to; must be less than
   *                            acceptableFactor and less than 1
   * @param acceptableFactor    fraction above which eviction triggers; must be
   *                            less than 1
   * @param singleFactor        share for single-access blocks
   * @param multiFactor         share for multi-access blocks
   * @param memoryFactor        share for in-memory blocks (the three shares
   *                            must be non-negative and total exactly 1.0)
   * @param forceInMemory       strictly confine in-memory blocks to their share
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
      int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
      float minFactor, float acceptableFactor, float singleFactor,
      float multiFactor, float memoryFactor, boolean forceInMemory) {
    // NOTE(review): exact float equality — the shares must sum to exactly
    // 1.0f after float rounding, which the defaults do.
    if(singleFactor + multiFactor + memoryFactor != 1 ||
        singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
      throw new IllegalArgumentException("Single, multi, and memory factors " +
          " should be non-negative and total 1.0");
    }
    if(minFactor >= acceptableFactor) {
      throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
    }
    if(minFactor >= 1.0f || acceptableFactor >= 1.0f) {
      throw new IllegalArgumentException("all factors must be < 1");
    }
    this.maxSize = maxSize;
    this.blockSize = blockSize;
    this.forceInMemory = forceInMemory;
    map = new ConcurrentHashMap<BlockCacheKey,LruCachedBlock>(mapInitialSize,
        mapLoadFactor, mapConcurrencyLevel);
    this.minFactor = minFactor;
    this.acceptableFactor = acceptableFactor;
    this.singleFactor = singleFactor;
    this.multiFactor = multiFactor;
    this.memoryFactor = memoryFactor;
    this.stats = new CacheStats();
    this.count = new AtomicLong(0);
    this.elements = new AtomicLong(0);
    // The cache's own bookkeeping overhead counts toward its reported size.
    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
    this.size = new AtomicLong(this.overhead);
    if(evictionThread) {
      this.evictionThread = new EvictionThread(this);
      this.evictionThread.start();
    } else {
      this.evictionThread = null;
    }
    // Periodically log cache statistics.
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }
292
293 public void setMaxSize(long maxSize) {
294 this.maxSize = maxSize;
295 if(this.size.get() > acceptableSize() && !evictionInProgress) {
296 runEviction();
297 }
298 }
299
300
301
302
303
304
305
306
307
308
309
310
  /**
   * Cache the block with the specified name and buffer.
   * <p>
   * It is assumed this will NOT be called on an already cached block; in the
   * rare case it is (see HBASE-8547 referenced below), the existing contents
   * are verified to match and the duplicate insert is ignored.
   *
   * @param cacheKey block's cache key
   * @param buf      block buffer
   * @param inMemory whether the block should get the in-memory priority
   */
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
    LruCachedBlock cb = map.get(cacheKey);
    if(cb != null) {
      // Compare the contents; a mismatch means two different blocks claimed
      // the same key, which must not happen.
      if (compare(buf, cb.getBuffer()) != 0) {
        throw new RuntimeException("Cached block contents differ, which should not have happened."
            + "cacheKey:" + cacheKey);
      }
      String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
      msg += ". This is harmless and can happen in rare cases (see HBASE-8547)";
      LOG.warn(msg);
      return;
    }
    cb = new LruCachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
    long newSize = updateSizeMetrics(cb, false);
    map.put(cacheKey, cb);
    elements.incrementAndGet();
    // Trigger an eviction pass if this insert pushed us past the watermark.
    if(newSize > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }
333
334 private int compare(Cacheable left, Cacheable right) {
335 ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
336 left.serialize(l);
337 ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
338 right.serialize(r);
339 return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
340 r.array(), r.arrayOffset(), r.limit());
341 }
342
343
344
345
346
347
348
349
350
351
352
  /**
   * Cache the block with the specified name and buffer, using the default
   * (not in-memory) priority.
   *
   * @param cacheKey block's cache key
   * @param buf      block buffer
   */
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
    cacheBlock(cacheKey, buf, false);
  }
356
357
358
359
360
361
362
363
364
365 protected long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
366 long heapsize = cb.heapSize();
367 if (evict) {
368 heapsize *= -1;
369 }
370 return size.addAndGet(heapsize);
371 }
372
373
374
375
376
377
378
379
380
381
  /**
   * Get the buffer of the block with the specified key.
   *
   * @param cacheKey           block's cache key
   * @param caching            true if the caller caches blocks on miss
   * @param repeat             true for a repeat lookup of the same block;
   *                           suppresses miss accounting
   * @param updateCacheMetrics whether to update hit/miss statistics
   * @return buffer of the specified cache key, or null if not in cache
   *         (after also consulting the victim cache, if configured)
   */
  @Override
  public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat,
      boolean updateCacheMetrics) {
    LruCachedBlock cb = map.get(cacheKey);
    if(cb == null) {
      if (!repeat && updateCacheMetrics) stats.miss(caching);
      // Fall through to the victim cache when one is configured.
      if (victimHandler != null)
        return victimHandler.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
      return null;
    }
    if (updateCacheMetrics) stats.hit(caching);
    // Touch the block so eviction sees it as recently used.
    cb.access(count.incrementAndGet());
    return cb.getBuffer();
  }
396
397
398
399
400
401
  /**
   * Whether the cache currently contains a block for this key.  Does not
   * consult the victim cache and does not update access statistics.
   */
  public boolean containsBlock(BlockCacheKey cacheKey) {
    return map.containsKey(cacheKey);
  }
405
406 @Override
407 public boolean evictBlock(BlockCacheKey cacheKey) {
408 LruCachedBlock cb = map.get(cacheKey);
409 if (cb == null) return false;
410 evictBlock(cb, false);
411 return true;
412 }
413
414
415
416
417
418
419
420
421
422
423
424 @Override
425 public int evictBlocksByHfileName(String hfileName) {
426 int numEvicted = 0;
427 for (BlockCacheKey key : map.keySet()) {
428 if (key.getHfileName().equals(hfileName)) {
429 if (evictBlock(key))
430 ++numEvicted;
431 }
432 }
433 if (victimHandler != null) {
434 numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
435 }
436 return numEvicted;
437 }
438
439
440
441
442
443
444
445
446
447 protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
448 map.remove(block.getCacheKey());
449 updateSizeMetrics(block, true);
450 elements.decrementAndGet();
451 stats.evicted();
452 if (evictedByEvictionProcess && victimHandler != null) {
453 boolean wait = getCurrentSize() < acceptableSize();
454 boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
455 victimHandler.cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
456 inMemory, wait);
457 }
458 return block.heapSize();
459 }
460
461
462
463
  /**
   * Multi-threaded call to run the eviction process.  Delegates to the
   * eviction thread when one exists, otherwise evicts inline on the caller.
   */
  private void runEviction() {
    if(evictionThread == null) {
      evict();
    } else {
      evictionThread.evict();
    }
  }
471
472
473
474
  /**
   * Eviction method.  Frees enough bytes to bring the cache from its
   * current size down to the minimum watermark, distributing evictions
   * across the single/multi/in-memory priority buckets according to their
   * quotas.  Only one eviction pass runs at a time.
   */
  void evict() {

    // Ensure only one eviction at a time
    if(!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      long bytesToFree = currentSize - minSize();

      if (LOG.isTraceEnabled()) {
        LOG.trace("Block cache LRU eviction started; Attempting to free " +
            StringUtils.byteDesc(bytesToFree) + " of total=" +
            StringUtils.byteDesc(currentSize));
      }

      // Already at or below the minimum watermark; nothing to do.
      if(bytesToFree <= 0) return;

      // Instantiate priority buckets, one per block priority, each with its
      // own size quota.
      BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize, singleSize());
      BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize, multiSize());
      BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize, memorySize());

      // Scan the entire cache and insert every block into its bucket.
      for(LruCachedBlock cachedBlock : map.values()) {
        switch(cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      long bytesFreed = 0;
      if (forceInMemory || memoryFactor > 0.999f) {
        // In-memory force mode: protect the memory bucket, evicting from
        // single and multi first.
        long s = bucketSingle.totalSize();
        long m = bucketMulti.totalSize();
        if (bytesToFree > (s + m)) {
          // Even draining single and multi completely is not enough, so the
          // memory bucket must give up the remainder.
          bytesFreed = bucketSingle.free(s);
          bytesFreed += bucketMulti.free(m);
          bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
        } else {
          // No need to touch the memory bucket; free from single and multi,
          // aiming for a 1:2 size ratio between what remains of them.
          long bytesRemain = s + m - bytesToFree;
          if (3 * s <= bytesRemain) {
            // Single bucket is already small enough; take everything from
            // the multi bucket.
            bytesFreed = bucketMulti.free(bytesToFree);
          } else if (3 * m <= 2 * bytesRemain) {
            // Multi bucket is already small enough; take everything from
            // the single bucket.
            bytesFreed = bucketSingle.free(bytesToFree);
          } else {
            // Both buckets must shed blocks to reach the 1:2 ratio.
            bytesFreed = bucketSingle.free(s - bytesRemain / 3);
            if (bytesFreed < bytesToFree) {
              bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
            }
          }
        }
      } else {
        // Default mode: process buckets in increasing order of overflow;
        // each over-quota bucket frees at most its overflow, capped at an
        // even share of the bytes still to free.
        PriorityQueue<BlockBucket> bucketQueue =
            new PriorityQueue<BlockBucket>(3);

        bucketQueue.add(bucketSingle);
        bucketQueue.add(bucketMulti);
        bucketQueue.add(bucketMemory);

        int remainingBuckets = 3;

        BlockBucket bucket;
        while((bucket = bucketQueue.poll()) != null) {
          long overflow = bucket.overflow();
          if(overflow > 0) {
            long bucketBytesToFree = Math.min(overflow,
                (bytesToFree - bytesFreed) / remainingBuckets);
            bytesFreed += bucket.free(bucketBytesToFree);
          }
          remainingBuckets--;
        }
      }

      if (LOG.isTraceEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.trace("Block cache LRU eviction completed; " +
            "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
            "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
            "single=" + StringUtils.byteDesc(single) + ", " +
            "multi=" + StringUtils.byteDesc(multi) + ", " +
            "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
586
587
588
589
590
591
592
593 private class BlockBucket implements Comparable<BlockBucket> {
594 private LruCachedBlockQueue queue;
595 private long totalSize = 0;
596 private long bucketSize;
597
598 public BlockBucket(long bytesToFree, long blockSize, long bucketSize) {
599 this.bucketSize = bucketSize;
600 queue = new LruCachedBlockQueue(bytesToFree, blockSize);
601 totalSize = 0;
602 }
603
604 public void add(LruCachedBlock block) {
605 totalSize += block.heapSize();
606 queue.add(block);
607 }
608
609 public long free(long toFree) {
610 LruCachedBlock cb;
611 long freedBytes = 0;
612 while ((cb = queue.pollLast()) != null) {
613 freedBytes += evictBlock(cb, true);
614 if (freedBytes >= toFree) {
615 return freedBytes;
616 }
617 }
618 return freedBytes;
619 }
620
621 public long overflow() {
622 return totalSize - bucketSize;
623 }
624
625 public long totalSize() {
626 return totalSize;
627 }
628
629 public int compareTo(BlockBucket that) {
630 if(this.overflow() == that.overflow()) return 0;
631 return this.overflow() > that.overflow() ? 1 : -1;
632 }
633
634 @Override
635 public boolean equals(Object that) {
636 if (that == null || !(that instanceof BlockBucket)){
637 return false;
638 }
639 return compareTo((BlockBucket)that) == 0;
640 }
641
642 @Override
643 public int hashCode() {
644
645 return super.hashCode();
646 }
647 }
648
649
650
651
652
  /**
   * Get the maximum size of this cache.
   *
   * @return max size in bytes
   */
  public long getMaxSize() {
    return this.maxSize;
  }

  /** @return current occupied size in bytes, including the fixed overhead */
  @Override
  public long getCurrentSize() {
    return this.size.get();
  }

  /** @return bytes remaining before the cache reaches its maximum size */
  @Override
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  /** @return the maximum size of the cache (not the occupied size) */
  @Override
  public long size() {
    return getMaxSize();
  }

  /** @return the number of blocks currently cached */
  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  /** @return the eviction thread, or null when evictions run inline */
  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }
680
681
682
683
684
685
686
687 static class EvictionThread extends HasThread {
688 private WeakReference<LruBlockCache> cache;
689 private boolean go = true;
690
691 private boolean enteringRun = false;
692
693 public EvictionThread(LruBlockCache cache) {
694 super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
695 setDaemon(true);
696 this.cache = new WeakReference<LruBlockCache>(cache);
697 }
698
699 @Override
700 public void run() {
701 enteringRun = true;
702 while (this.go) {
703 synchronized(this) {
704 try {
705 this.wait(1000 * 10
706 } catch(InterruptedException e) {}
707 }
708 LruBlockCache cache = this.cache.get();
709 if (cache == null) break;
710 cache.evict();
711 }
712 }
713
714 @edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
715 justification="This is what we want")
716 public void evict() {
717 synchronized(this) {
718 this.notifyAll();
719 }
720 }
721
722 synchronized void shutdown() {
723 this.go = false;
724 this.notifyAll();
725 }
726
727
728
729
730 boolean isEnteringRun() {
731 return this.enteringRun;
732 }
733 }
734
735
736
737
  /*
   * Statistics thread.  Logs the cache statistics once per scheduled run
   * (scheduled periodically from the constructor).
   */
  static class StatisticsThread extends Thread {
    /** The cache whose statistics are logged. */
    LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCache.StatisticsThread");
      setDaemon(true);
      this.lru = lru;
    }
    @Override
    public void run() {
      lru.logStats();
    }
  }
751
752 public void logStats() {
753 if (!LOG.isDebugEnabled()) return;
754
755 long totalSize = heapSize();
756 long freeSize = maxSize - totalSize;
757 LruBlockCache.LOG.debug("Total=" + StringUtils.byteDesc(totalSize) + ", " +
758 "free=" + StringUtils.byteDesc(freeSize) + ", " +
759 "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
760 "blocks=" + size() +", " +
761 "accesses=" + stats.getRequestCount() + ", " +
762 "hits=" + stats.getHitCount() + ", " +
763 "hitRatio=" +
764 (stats.getHitCount() == 0 ? "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " +
765 "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
766 "cachingHits=" + stats.getHitCachingCount() + ", " +
767 "cachingHitsRatio=" +
768 (stats.getHitCachingCount() == 0 ? "0,": (StringUtils.formatPercent(stats.getHitCachingRatio(), 2) + ", ")) +
769 "evictions=" + stats.getEvictionCount() + ", " +
770 "evicted=" + stats.getEvictedCount() + ", " +
771 "evictedPerRun=" + stats.evictedPerEviction());
772 }
773
774
775
776
777
778
779
  /**
   * Get counter statistics for this cache: accesses, hits, misses,
   * evictions, and evicted block counts.
   */
  public CacheStats getStats() {
    return this.stats;
  }

  /** Fixed per-instance heap overhead, aligned.
   *  NOTE(review): the class declares two booleans (forceInMemory and the
   *  volatile evictionInProgress) but only one SIZEOF_BOOLEAN is counted
   *  here; re-verify the reference count against the field list — confirm. */
  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (9 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
      + ClassSize.OBJECT);

  /** @return occupied heap size; same value as {@link #getCurrentSize()} */
  public long heapSize() {
    return getCurrentSize();
  }

  /**
   * Estimates total bookkeeping overhead for a cache of the given
   * dimensions: the fixed instance overhead plus the backing map, its
   * expected entries (sized for 1.2x the expected block count), and its
   * concurrency segments.
   */
  public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
    // FindBugs ICAST_INTEGER_MULTIPLY_CAST_TO_LONG
    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
        ((long)Math.ceil(maxSize*1.2/blockSize)
        * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        ((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
  }
801
802 @Override
803 public Iterator<CachedBlock> iterator() {
804 final Iterator<LruCachedBlock> iterator = map.values().iterator();
805
806 return new Iterator<CachedBlock>() {
807 private final long now = System.nanoTime();
808
809 @Override
810 public boolean hasNext() {
811 return iterator.hasNext();
812 }
813
814 @Override
815 public CachedBlock next() {
816 final LruCachedBlock b = iterator.next();
817 return new CachedBlock() {
818 @Override
819 public String toString() {
820 return BlockCacheUtil.toString(this, now);
821 }
822
823 @Override
824 public BlockPriority getBlockPriority() {
825 return b.getPriority();
826 }
827
828 @Override
829 public BlockType getBlockType() {
830 return b.getBuffer().getBlockType();
831 }
832
833 @Override
834 public long getOffset() {
835 return b.getCacheKey().getOffset();
836 }
837
838 @Override
839 public long getSize() {
840 return b.getBuffer().heapSize();
841 }
842
843 @Override
844 public long getCachedTime() {
845 return b.getCachedTime();
846 }
847
848 @Override
849 public String getFilename() {
850 return b.getCacheKey().getHfileName();
851 }
852
853 @Override
854 public int compareTo(CachedBlock other) {
855 int diff = this.getFilename().compareTo(other.getFilename());
856 if (diff != 0) return diff;
857 diff = (int)(this.getOffset() - other.getOffset());
858 if (diff != 0) return diff;
859 if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
860 throw new IllegalStateException("" + this.getCachedTime() + ", " +
861 other.getCachedTime());
862 }
863 return (int)(other.getCachedTime() - this.getCachedTime());
864 }
865
866 @Override
867 public int hashCode() {
868 return b.hashCode();
869 }
870
871 @Override
872 public boolean equals(Object obj) {
873 if (obj instanceof CachedBlock) {
874 CachedBlock cb = (CachedBlock)obj;
875 return compareTo(cb) == 0;
876 } else {
877 return false;
878 }
879 }
880 };
881 }
882
883 @Override
884 public void remove() {
885 throw new UnsupportedOperationException();
886 }
887 };
888 }
889
890
891
  // Simple calculators of sizes given factors and maxSize

  /** Usage level above which an eviction pass is triggered. */
  private long acceptableSize() {
    return (long)Math.floor(this.maxSize * this.acceptableFactor);
  }
  /** Level an eviction pass tries to shrink the cache down to. */
  private long minSize() {
    return (long)Math.floor(this.maxSize * this.minFactor);
  }
  /** Quota of the single-access priority bucket. */
  private long singleSize() {
    return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  /** Quota of the multi-access priority bucket. */
  private long multiSize() {
    return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  /** Quota of the in-memory priority bucket. */
  private long memorySize() {
    return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }
907
908 public void shutdown() {
909 if (victimHandler != null)
910 victimHandler.shutdown();
911 this.scheduleThreadPool.shutdown();
912 for (int i = 0; i < 10; i++) {
913 if (!this.scheduleThreadPool.isShutdown()) Threads.sleep(10);
914 }
915 if (!this.scheduleThreadPool.isShutdown()) {
916 List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
917 LOG.debug("Still running " + runnables);
918 }
919 this.evictionThread.shutdown();
920 }
921
922
  /** Clears the cache.  Used in tests.
   *  NOTE(review): only the map is cleared; the size and elements counters
   *  are left untouched, so metrics over-report afterwards — acceptable for
   *  tests, confirm before relying on it elsewhere. */
  public void clearCache() {
    map.clear();
  }
926
927
928
929
930
931 SortedSet<String> getCachedFileNamesForTest() {
932 SortedSet<String> fileNames = new TreeSet<String>();
933 for (BlockCacheKey cacheKey : map.keySet()) {
934 fileNames.add(cacheKey.getHfileName());
935 }
936 return fileNames;
937 }
938
939 @VisibleForTesting
940 Map<BlockType, Integer> getBlockTypeCountsForTest() {
941 Map<BlockType, Integer> counts =
942 new EnumMap<BlockType, Integer>(BlockType.class);
943 for (LruCachedBlock cb : map.values()) {
944 BlockType blockType = ((HFileBlock) cb.getBuffer()).getBlockType();
945 Integer count = counts.get(blockType);
946 counts.put(blockType, (count == null ? 0 : count) + 1);
947 }
948 return counts;
949 }
950
951 public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
952 Map<DataBlockEncoding, Integer> counts =
953 new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
954 for (BlockCacheKey cacheKey : map.keySet()) {
955 DataBlockEncoding encoding = cacheKey.getDataBlockEncoding();
956 Integer count = counts.get(encoding);
957 counts.put(encoding, (count == null ? 0 : count) + 1);
958 }
959 return counts;
960 }
961
  /**
   * Sets the secondary "victim" cache that receives blocks evicted from
   * this cache.  May only be set once (enforced via assert).
   */
  public void setVictimCache(BucketCache handler) {
    assert victimHandler == null;
    victimHandler = handler;
  }

  /** @return null; this cache is not composed of sub-caches */
  @Override
  public BlockCache[] getBlockCaches() {
    return null;
  }
971 }