/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CachedBlock.BlockPriority;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.StringUtils;

import com.google.common.util.concurrent.ThreadFactoryBuilder;
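/**
 * A block cache implementation that is memory-aware using {@link HeapSize},
 * memory-bound using an LRU eviction algorithm, and concurrent: backed by a
 * {@link ConcurrentHashMap} and with a non-blocking eviction thread giving
 * constant-time {@link #cacheBlock} and {@link #getBlock} operations.
 *
 * <p>Blocks are kept in three priority buckets to provide scan resistance and
 * support for in-memory column families: blocks cached with the in-memory flag
 * go to the memory bucket, newly cached blocks go to the single-access bucket,
 * and blocks that are read again are promoted to the multi-access bucket.
 * Each bucket is given its own share of the cache (see the DEFAULT_*_FACTOR
 * constants), and by default eviction frees space from the buckets that exceed
 * their share.
 *
 * <p>Eviction starts when the cache grows above the acceptable factor of its
 * maximum size and frees blocks until the cache is back at the minimum factor.
 * When a {@link BucketCache} victim cache is configured, evicted blocks are
 * handed off to it and read misses fall through to it.
 *
 * <p>A minimal usage sketch; the sizes and the {@code cacheKey}/{@code block}
 * variables are illustrative only:
 *
 * <pre>
 *   LruBlockCache cache = new LruBlockCache(64 * 1024 * 1024, 64 * 1024);
 *   cache.cacheBlock(cacheKey, block);                 // single-access priority
 *   Cacheable hit = cache.getBlock(cacheKey, true, false, true);
 *   cache.shutdown();
 * </pre>
 */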
@InterfaceAudience.Private
public class LruBlockCache implements BlockCache, HeapSize {

  static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";
  static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
  static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
  static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";
  static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";
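  /** Defaults for the backing ConcurrentHashMap */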
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;
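  /** Eviction thresholds: eviction starts above the acceptable factor and frees down to the min factor */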
  static final float DEFAULT_MIN_FACTOR = 0.95f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;
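  /** Default split of the cache among the single-access, multi-access and in-memory priority buckets */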
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;
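  /** Period, in seconds, at which the statistics thread logs cache stats */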
  static final int statThreadPeriod = 60 * 5;
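  /** Concurrent map (the cache) */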
  private final ConcurrentHashMap<BlockCacheKey,CachedBlock> map;

  private final ReentrantLock evictionLock = new ReentrantLock(true);

  private volatile boolean evictionInProgress = false;
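  /** Eviction thread; woken by runEviction() when the cache grows above the acceptable size */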
  private final EvictionThread evictionThread;
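  /** Scheduled executor that runs the statistics thread */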
  private final ScheduledExecutorService scheduleThreadPool =
      Executors.newScheduledThreadPool(1,
          new ThreadFactoryBuilder()
              .setNameFormat("LruStats #%d")
              .setDaemon(true)
              .build());
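  // Cache metrics: current heap size of the cache in bytes ("size"), number of
  // cached blocks ("elements"), a monotonically increasing access counter used
  // to order blocks for eviction ("count"), and hit/miss statistics ("stats").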
  private final AtomicLong size;

  private final AtomicLong elements;

  private final AtomicLong count;

  private final CacheStats stats;
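  // Tunables: the maximum allowable heap size of the cache, the approximate
  // block size used to size the backing map, the eviction factors described by
  // the DEFAULT_* constants above, the fixed overhead estimate, and the
  // in-memory force mode flag.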
  private long maxSize;

  private long blockSize;

  private float acceptableFactor;

  private float minFactor;

  private float singleFactor;

  private float multiFactor;

  private float memoryFactor;

  private long overhead;

  private boolean forceInMemory;
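  /** Optional victim cache (a BucketCache): receives blocks evicted by the eviction process and is consulted on read misses */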
  private BucketCache victimHandler = null;
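  /**
   * Default constructor.  Specify the maximum size of the cache and the
   * approximate block size; all other factors use the defaults defined in this
   * class, and the eviction thread is started.
   *
   * @param maxSize maximum size of the cache, in bytes
   * @param blockSize approximate size of each block, in bytes
   */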
  public LruBlockCache(long maxSize, long blockSize) {
    this(maxSize, blockSize, true);
  }
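  /**
   * Constructor that allows the eviction thread to be disabled; evictions then
   * run inline in runEviction().  Mainly useful in tests.
   */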
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
        DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
        DEFAULT_SINGLE_FACTOR,
        DEFAULT_MULTI_FACTOR,
        DEFAULT_MEMORY_FACTOR,
        false
        );
  }

  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
        conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
        conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
        conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE)
        );
  }

  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }
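  /**
   * Configurable constructor.  Use this constructor if not using defaults.
   *
   * @param maxSize maximum size of this cache, in bytes
   * @param blockSize expected average size of blocks, in bytes
   * @param evictionThread whether to run evictions in a background thread
   * @param mapInitialSize initial size of the backing ConcurrentHashMap
   * @param mapLoadFactor load factor of the backing ConcurrentHashMap
   * @param mapConcurrencyLevel concurrency level of the backing ConcurrentHashMap
   * @param minFactor fraction of total size that eviction shrinks down to
   * @param acceptableFactor fraction of total size that triggers eviction
   * @param singleFactor fraction of total size allotted to single-access blocks
   * @param multiFactor fraction of total size allotted to multiple-access blocks
   * @param memoryFactor fraction of total size allotted to in-memory blocks
   * @param forceInMemory if true, in-memory blocks are retained in favor of
   *          evicting single- and multi-access blocks first
   */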
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
      int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
      float minFactor, float acceptableFactor, float singleFactor,
      float multiFactor, float memoryFactor, boolean forceInMemory) {
    if(singleFactor + multiFactor + memoryFactor != 1 ||
        singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
      throw new IllegalArgumentException("Single, multi, and memory factors " +
          "should be non-negative and total 1.0");
    }
    if(minFactor >= acceptableFactor) {
      throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
    }
    if(minFactor >= 1.0f || acceptableFactor >= 1.0f) {
      throw new IllegalArgumentException("all factors must be < 1");
    }
    this.maxSize = maxSize;
    this.blockSize = blockSize;
    this.forceInMemory = forceInMemory;
    map = new ConcurrentHashMap<BlockCacheKey,CachedBlock>(mapInitialSize,
        mapLoadFactor, mapConcurrencyLevel);
    this.minFactor = minFactor;
    this.acceptableFactor = acceptableFactor;
    this.singleFactor = singleFactor;
    this.multiFactor = multiFactor;
    this.memoryFactor = memoryFactor;
    this.stats = new CacheStats();
    this.count = new AtomicLong(0);
    this.elements = new AtomicLong(0);
    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
    this.size = new AtomicLong(this.overhead);
    if(evictionThread) {
      this.evictionThread = new EvictionThread(this);
      this.evictionThread.start();
    } else {
      this.evictionThread = null;
    }
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }
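  /**
   * Resets the maximum size of the cache.  If the current size exceeds the new
   * acceptable size, an eviction run is triggered.
   *
   * @param maxSize the new maximum size of the cache, in bytes
   */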
  public void setMaxSize(long maxSize) {
    this.maxSize = maxSize;
    if(this.size.get() > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }
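  /**
   * Cache the block with the specified key and buffer.
   * <p>
   * If the block is already cached with equal contents, a warning is logged and
   * the call is a no-op (see HBASE-8547); if the cached contents differ, a
   * RuntimeException is thrown.  Caching may trigger an eviction run when the
   * cache grows above the acceptable size.
   *
   * @param cacheKey block's cache key
   * @param buf block buffer
   * @param inMemory if block is in-memory
   */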
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
    CachedBlock cb = map.get(cacheKey);
    if(cb != null) {
      if (compare(buf, cb.getBuffer()) != 0) {
        throw new RuntimeException("Cached block contents differ, which should not have happened. "
            + "cacheKey:" + cacheKey);
      }
      String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
      msg += ". This is harmless and can happen in rare cases (see HBASE-8547)";
      LOG.warn(msg);
      return;
    }
    cb = new CachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
    long newSize = updateSizeMetrics(cb, false);
    map.put(cacheKey, cb);
    elements.incrementAndGet();
    if(newSize > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }
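  /**
   * Compares the contents of two {@link Cacheable}s by serializing both and
   * doing a byte-wise comparison.
   */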
  private int compare(Cacheable left, Cacheable right) {
    ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
    left.serialize(l);
    ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
    right.serialize(r);
    return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
        r.array(), r.arrayOffset(), r.limit());
  }
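  /**
   * Cache the block with the specified key and buffer at the lowest
   * (single-access) priority.
   *
   * @param cacheKey block's cache key
   * @param buf block buffer
   */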
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
    cacheBlock(cacheKey, buf, false);
  }
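  /**
   * Adjusts the cache size counter by the heap size of the given block,
   * subtracting it when the block is being evicted.
   *
   * @return the new cache size, in bytes
   */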
  protected long updateSizeMetrics(CachedBlock cb, boolean evict) {
    long heapsize = cb.heapSize();
    if (evict) {
      heapsize *= -1;
    }
    return size.addAndGet(heapsize);
  }
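  /**
   * Get the buffer of the block with the specified key.
   *
   * @param cacheKey block's cache key
   * @param caching true if the caller will cache the block on a miss
   * @param repeat whether this is a repeat lookup for the same block (so that
   *          misses are not double counted)
   * @param updateCacheMetrics whether to update hit/miss metrics
   * @return the buffer for the given key, the victim cache's answer when this
   *         cache misses and a victim cache is configured, or null
   */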
  @Override
  public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat,
      boolean updateCacheMetrics) {
    CachedBlock cb = map.get(cacheKey);
    if(cb == null) {
      if (!repeat && updateCacheMetrics) stats.miss(caching);
      if (victimHandler != null)
        return victimHandler.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
      return null;
    }
    if (updateCacheMetrics) stats.hit(caching);
    cb.access(count.incrementAndGet());
    return cb.getBuffer();
  }
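  /**
   * Whether the cache contains the block with the specified cache key.
   *
   * @return true if the block is in the cache
   */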
  public boolean containsBlock(BlockCacheKey cacheKey) {
    return map.containsKey(cacheKey);
  }

  @Override
  public boolean evictBlock(BlockCacheKey cacheKey) {
    CachedBlock cb = map.get(cacheKey);
    if (cb == null) return false;
    evictBlock(cb, false);
    return true;
  }
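  /**
   * Evicts all blocks for a specific HFile.  This is an expensive operation
   * implemented as a linear-time scan of all blocks in the cache; the request
   * is also forwarded to the victim cache if one is configured.
   *
   * @return the number of blocks evicted
   */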
  @Override
  public int evictBlocksByHfileName(String hfileName) {
    int numEvicted = 0;
    for (BlockCacheKey key : map.keySet()) {
      if (key.getHfileName().equals(hfileName)) {
        if (evictBlock(key))
          ++numEvicted;
      }
    }
    if (victimHandler != null) {
      numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
    }
    return numEvicted;
  }
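  /**
   * Evict the given block: remove it from the map, update size and element
   * counters, and, when evicted by the eviction process and a victim cache is
   * configured, hand the block off to the victim cache.
   *
   * @param block the block to evict
   * @param evictedByEvictionProcess true if called from the eviction process
   * @return the heap size of the evicted block
   */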
  protected long evictBlock(CachedBlock block, boolean evictedByEvictionProcess) {
    map.remove(block.getCacheKey());
    updateSizeMetrics(block, true);
    elements.decrementAndGet();
    stats.evicted();
    if (evictedByEvictionProcess && victimHandler != null) {
      boolean wait = getCurrentSize() < acceptableSize();
      boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
      victimHandler.cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
          inMemory, wait);
    }
    return block.heapSize();
  }
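  /**
   * Kick off an eviction: runs inline when no eviction thread is configured,
   * otherwise wakes the eviction thread.
   */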
  private void runEviction() {
    if(evictionThread == null) {
      evict();
    } else {
      evictionThread.evict();
    }
  }
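  /**
   * Eviction method.  Frees blocks from the single-access, multi-access and
   * in-memory priority buckets until the cache is back at its minimum size.
   * Only one eviction runs at a time.
   */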
  void evict() {

    if(!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      long bytesToFree = currentSize - minSize();

      if (LOG.isTraceEnabled()) {
        LOG.trace("Block cache LRU eviction started; Attempting to free " +
            StringUtils.byteDesc(bytesToFree) + " of total=" +
            StringUtils.byteDesc(currentSize));
      }

      if(bytesToFree <= 0) return;

      BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize,
          singleSize());
      BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize,
          multiSize());
      BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize,
          memorySize());

      for(CachedBlock cachedBlock : map.values()) {
        switch(cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      long bytesFreed = 0;
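      // Freeing strategy: with in-memory force mode (or when the memory bucket
      // is configured to own essentially the whole cache), free from the
      // single- and multi-access buckets first and only touch the in-memory
      // bucket if that is not enough, aiming to keep the surviving single and
      // multi data at roughly a 1:2 ratio.  Otherwise, free from the buckets in
      // order of least overflow, giving each at most an even share of the
      // remaining bytes to free and never more than its own overflow.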
      if (forceInMemory || memoryFactor > 0.999f) {
        long s = bucketSingle.totalSize();
        long m = bucketMulti.totalSize();
        if (bytesToFree > (s + m)) {
          bytesFreed = bucketSingle.free(s);
          bytesFreed += bucketMulti.free(m);
          bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
        } else {
          long bytesRemain = s + m - bytesToFree;
          if (3 * s <= bytesRemain) {
            bytesFreed = bucketMulti.free(bytesToFree);
          } else if (3 * m <= 2 * bytesRemain) {
            bytesFreed = bucketSingle.free(bytesToFree);
          } else {
            bytesFreed = bucketSingle.free(s - bytesRemain / 3);
            if (bytesFreed < bytesToFree) {
              bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
            }
          }
        }
      } else {
        PriorityQueue<BlockBucket> bucketQueue =
            new PriorityQueue<BlockBucket>(3);

        bucketQueue.add(bucketSingle);
        bucketQueue.add(bucketMulti);
        bucketQueue.add(bucketMemory);

        int remainingBuckets = 3;

        BlockBucket bucket;
        while((bucket = bucketQueue.poll()) != null) {
          long overflow = bucket.overflow();
          if(overflow > 0) {
            long bucketBytesToFree = Math.min(overflow,
                (bytesToFree - bytesFreed) / remainingBuckets);
            bytesFreed += bucket.free(bucketBytesToFree);
          }
          remainingBuckets--;
        }
      }

      if (LOG.isTraceEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.trace("Block cache LRU eviction completed; " +
            "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
            "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
            "single=" + StringUtils.byteDesc(single) + ", " +
            "multi=" + StringUtils.byteDesc(multi) + ", " +
            "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
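  /**
   * Used to group blocks into priority buckets.  There is one BlockBucket per
   * priority (single, multi, memory).  Once bucketed, the eviction algorithm
   * takes the appropriate number of elements out of each bucket according to
   * the configured factors and the buckets' relative sizes.
   */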
  private class BlockBucket implements Comparable<BlockBucket> {

    private CachedBlockQueue queue;
    private long totalSize = 0;
    private long bucketSize;

    public BlockBucket(long bytesToFree, long blockSize, long bucketSize) {
      this.bucketSize = bucketSize;
      queue = new CachedBlockQueue(bytesToFree, blockSize);
      totalSize = 0;
    }

    public void add(CachedBlock block) {
      totalSize += block.heapSize();
      queue.add(block);
    }

    public long free(long toFree) {
      CachedBlock cb;
      long freedBytes = 0;
      while ((cb = queue.pollLast()) != null) {
        freedBytes += evictBlock(cb, true);
        if (freedBytes >= toFree) {
          return freedBytes;
        }
      }
      return freedBytes;
    }

    public long overflow() {
      return totalSize - bucketSize;
    }

    public long totalSize() {
      return totalSize;
    }

    @Override
    public int compareTo(BlockBucket that) {
      if(this.overflow() == that.overflow()) return 0;
      return this.overflow() > that.overflow() ? 1 : -1;
    }

    @Override
    public boolean equals(Object that) {
      if (!(that instanceof BlockBucket)) {
        return false;
      }
      return compareTo((BlockBucket) that) == 0;
    }

    @Override
    public int hashCode() {
      // Keep the equals/hashCode contract: buckets compare equal when their
      // overflow is equal, so hash by overflow as well.
      return (int) (overflow() ^ (overflow() >>> 32));
    }
  }
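  /**
   * Get the maximum size of this cache.
   *
   * @return max size in bytes
   */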
  public long getMaxSize() {
    return this.maxSize;
  }

  @Override
  public long getCurrentSize() {
    return this.size.get();
  }

  @Override
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  @Override
  public long size() {
    return this.elements.get();
  }

  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  public long getEvictionCount() {
    return this.stats.getEvictionCount();
  }

  @Override
  public long getEvictedCount() {
    return this.stats.getEvictedCount();
  }

  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }
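  /**
   * Eviction thread.  Sits in a waiting state until woken by runEviction()
   * when the cache size grows above the acceptable level.
   */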
  static class EvictionThread extends HasThread {
    private WeakReference<LruBlockCache> cache;
    private boolean go = true;
    private boolean enteringRun = false;

    public EvictionThread(LruBlockCache cache) {
      super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
      setDaemon(true);
      this.cache = new WeakReference<LruBlockCache>(cache);
    }

    @Override
    public void run() {
      enteringRun = true;
      while (this.go) {
        synchronized(this) {
          try {
            this.wait();
          } catch(InterruptedException e) {}
        }
        LruBlockCache cache = this.cache.get();
        if(cache == null) break;
        cache.evict();
      }
    }

    public void evict() {
      synchronized(this) {
        this.notifyAll();
      }
    }

    synchronized void shutdown() {
      this.go = false;
      this.notifyAll();
    }

    boolean isEnteringRun() {
      return this.enteringRun;
    }
  }
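  /**
   * Statistics thread.  Periodically logs the cache statistics via logStats().
   */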
  static class StatisticsThread extends Thread {
    LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCache.StatisticsThread");
      setDaemon(true);
      this.lru = lru;
    }

    @Override
    public void run() {
      lru.logStats();
    }
  }
  public void logStats() {
    if (!LOG.isDebugEnabled()) return;

    long totalSize = heapSize();
    long freeSize = maxSize - totalSize;
    LruBlockCache.LOG.debug("Total=" + StringUtils.byteDesc(totalSize) + ", " +
        "free=" + StringUtils.byteDesc(freeSize) + ", " +
        "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
        "blocks=" + size() + ", " +
        "accesses=" + stats.getRequestCount() + ", " +
        "hits=" + stats.getHitCount() + ", " +
        "hitRatio=" + (stats.getHitCount() == 0 ?
            "0" : StringUtils.formatPercent(stats.getHitRatio(), 2)) + ", " +
        "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
        "cachingHits=" + stats.getHitCachingCount() + ", " +
        "cachingHitsRatio=" + (stats.getHitCachingCount() == 0 ?
            "0" : StringUtils.formatPercent(stats.getHitCachingRatio(), 2)) + ", " +
        "evictions=" + stats.getEvictionCount() + ", " +
        "evicted=" + stats.getEvictedCount() + ", " +
        "evictedPerRun=" + stats.evictedPerEviction());
  }
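  /**
   * Get counter statistics for this cache: total accesses, hits, misses,
   * evicted blocks, and runs of the eviction process.
   */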
  public CacheStats getStats() {
    return this.stats;
  }

  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (9 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
      + ClassSize.OBJECT);

  public long heapSize() {
    return getCurrentSize();
  }

  public static long calculateOverhead(long maxSize, long blockSize, int concurrency) {
    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
        ((long)Math.ceil(maxSize*1.2/blockSize)
            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        ((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
  }
  @Override
  public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(
      Configuration conf) throws IOException {

    Map<String, Path> sfMap = FSUtils.getTableStoreFilePathMap(
        FileSystem.get(conf),
        FSUtils.getRootDir(conf));

    Map<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary> bcs =
        new HashMap<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary>();

    for (CachedBlock cb : map.values()) {
      String sf = cb.getCacheKey().getHfileName();
      Path path = sfMap.get(sf);
      if (path != null) {
        BlockCacheColumnFamilySummary lookup =
            BlockCacheColumnFamilySummary.createFromStoreFilePath(path);
        BlockCacheColumnFamilySummary bcse = bcs.get(lookup);
        if (bcse == null) {
          bcse = BlockCacheColumnFamilySummary.create(lookup);
          bcs.put(lookup, bcse);
        }
        bcse.incrementBlocks();
        bcse.incrementHeapSize(cb.heapSize());
      }
    }
    List<BlockCacheColumnFamilySummary> list =
        new ArrayList<BlockCacheColumnFamilySummary>(bcs.values());
    Collections.sort(list);
    return list;
  }
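  // Simple calculators of sizes given factors and maxSize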
  private long acceptableSize() {
    return (long)Math.floor(this.maxSize * this.acceptableFactor);
  }
  private long minSize() {
    return (long)Math.floor(this.maxSize * this.minFactor);
  }
  private long singleSize() {
    return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  private long multiSize() {
    return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  private long memorySize() {
    return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }

  public void shutdown() {
    if (victimHandler != null)
      victimHandler.shutdown();
    this.scheduleThreadPool.shutdown();
    for (int i = 0; i < 10; i++) {
      if (!this.scheduleThreadPool.isShutdown()) Threads.sleep(10);
    }
    if (!this.scheduleThreadPool.isShutdown()) {
      List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
      LOG.debug("Still running " + runnables);
    }
    if (this.evictionThread != null) this.evictionThread.shutdown();
  }

  public void clearCache() {
    map.clear();
  }
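  /** Returns the set of HFile names currently in the cache; used in tests. */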
  SortedSet<String> getCachedFileNamesForTest() {
    SortedSet<String> fileNames = new TreeSet<String>();
    for (BlockCacheKey cacheKey : map.keySet()) {
      fileNames.add(cacheKey.getHfileName());
    }
    return fileNames;
  }

  Map<BlockType, Integer> getBlockTypeCountsForTest() {
    Map<BlockType, Integer> counts =
        new EnumMap<BlockType, Integer>(BlockType.class);
    for (CachedBlock cb : map.values()) {
      BlockType blockType = ((HFileBlock) cb.getBuffer()).getBlockType();
      Integer count = counts.get(blockType);
      counts.put(blockType, (count == null ? 0 : count) + 1);
    }
    return counts;
  }

  public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
    Map<DataBlockEncoding, Integer> counts =
        new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
    for (BlockCacheKey cacheKey : map.keySet()) {
      DataBlockEncoding encoding = cacheKey.getDataBlockEncoding();
      Integer count = counts.get(encoding);
      counts.put(encoding, (count == null ? 0 : count) + 1);
    }
    return counts;
  }

  public void setVictimCache(BucketCache handler) {
    assert victimHandler == null;
    victimHandler = handler;
  }
}