/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CachedBlock.BlockPriority;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.StringUtils;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * A block cache implementation that is memory-aware using {@link HeapSize},
 * memory-bound using an LRU eviction algorithm, and concurrent: backed by a
 * {@link ConcurrentHashMap} and with a non-blocking eviction thread giving
 * constant-time {@link #cacheBlock} and {@link #getBlock} operations.
 *
 * <p>Contains three levels of block priority to allow for scan-resistance and
 * in-memory column families. A block is added with an inMemory flag if
 * necessary, otherwise it enters at single-access priority. Once a block is
 * accessed again it moves to multi-access priority, which prevents scans from
 * thrashing the cache by adding a least-frequently-used element to the
 * eviction algorithm.
 *
 * <p>Each priority is given its own chunk of the total cache to ensure
 * fairness during eviction. Each priority will retain close to its maximum
 * size; however, if any priority is not using its entire chunk the others are
 * able to grow beyond their chunk size.
 *
 * <p>Instantiated at a minimum with the total size and average block size.
 * All sizes are in bytes. The block size is not especially important as this
 * cache is fully dynamic in its sizing of blocks. It is only used for
 * pre-allocating data structures and for the initial heap estimation of the
 * backing map.
 *
 * <p>The detailed constructor defines the chunk sizes for the three
 * priorities (they should total 1.0 of the maximum size) as well as the
 * levels that control the eviction thread: eviction starts once the cache
 * grows past the acceptable size and frees blocks until the size drops below
 * the minimum size.
 *
 * <p>Eviction runs in a separate thread and involves a single full scan of
 * the map, collecting the least-recently-used blocks of each priority into
 * buckets and then freeing fairly according to the buckets' relative sizes
 * and usage. An optional {@link BucketCache} may be attached as a victim
 * handler, in which case evicted blocks spill into it and cache misses fall
 * through to it.
 */
@InterfaceAudience.Private
public class LruBlockCache implements BlockCache, HeapSize {

  static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  /** Configuration keys for the eviction thresholds and priority chunks */
  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";
  static final String LRU_SINGLE_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.single.percentage";
  static final String LRU_MULTI_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.multi.percentage";
  static final String LRU_MEMORY_PERCENTAGE_CONFIG_NAME = "hbase.lru.blockcache.memory.percentage";

  /**
   * Configuration key for in-memory force mode: if true, in-memory blocks may
   * use the whole cache rather than being capped at the memory chunk size.
   */
  static final String LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME = "hbase.lru.rs.inmemoryforcemode";

  /* Backing concurrent map configuration */
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;

  /* Eviction thresholds */
  static final float DEFAULT_MIN_FACTOR = 0.95f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.99f;

  /* Priority buckets */
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  static final boolean DEFAULT_IN_MEMORY_FORCE_MODE = false;

  /** Statistics thread period, in seconds */
  static final int statThreadPeriod = 60 * 5;

  /** Concurrent map (the cache) */
  private final ConcurrentHashMap<BlockCacheKey,CachedBlock> map;

  /** Eviction lock (locked when eviction in process) */
  private final ReentrantLock evictionLock = new ReentrantLock(true);

  /** Volatile boolean to track if an eviction is in process or not */
  private volatile boolean evictionInProgress = false;

  /** Eviction thread */
  private final EvictionThread evictionThread;

  /** Statistics thread schedule pool (for heavy debugging) */
  private final ScheduledExecutorService scheduleThreadPool =
      Executors.newScheduledThreadPool(1,
          new ThreadFactoryBuilder()
              .setNameFormat("LruStats #%d")
              .setDaemon(true)
              .build());

  /** Current size of cache, in bytes */
  private final AtomicLong size;

  /** Current number of cached elements */
  private final AtomicLong elements;

  /** Cache access count (sequential ID) */
  private final AtomicLong count;

  /** Cache statistics */
  private final CacheStats stats;

  /** Maximum allowable size of cache (eviction triggers above this) */
  private long maxSize;

  /** Approximate block size */
  private long blockSize;

  /** Acceptable size of cache (no evictions while size < acceptable) */
  private float acceptableFactor;

  /** Minimum threshold of cache (when evicting, evict until size < min) */
  private float minFactor;

  /** Chunk share for single-access blocks */
  private float singleFactor;

  /** Chunk share for multiple-access blocks */
  private float multiFactor;

  /** Chunk share for in-memory blocks */
  private float memoryFactor;

  /** Overhead of the structure itself */
  private long overhead;

  /** Whether in-memory blocks may use the whole cache */
  private boolean forceInMemory;

  /** Where to send victims (blocks evicted from the cache) */
  private BucketCache victimHandler = null;

  /**
   * Default constructor. Specify the maximum size and the approximate average
   * block size of the blocks in the cache (both in bytes). All other factors
   * are defaults.
   *
   * @param maxSize   maximum size of cache, in bytes
   * @param blockSize approximate size of each block, in bytes
   */
  public LruBlockCache(long maxSize, long blockSize) {
    this(maxSize, blockSize, true);
  }

  /**
   * Constructor used for testing. Allows disabling of the eviction thread.
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR, DEFAULT_CONCURRENCY_LEVEL,
        DEFAULT_MIN_FACTOR, DEFAULT_ACCEPTABLE_FACTOR,
        DEFAULT_SINGLE_FACTOR,
        DEFAULT_MULTI_FACTOR,
        DEFAULT_MEMORY_FACTOR,
        false
        );
  }

  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        conf.getFloat(LRU_SINGLE_PERCENTAGE_CONFIG_NAME, DEFAULT_SINGLE_FACTOR),
        conf.getFloat(LRU_MULTI_PERCENTAGE_CONFIG_NAME, DEFAULT_MULTI_FACTOR),
        conf.getFloat(LRU_MEMORY_PERCENTAGE_CONFIG_NAME, DEFAULT_MEMORY_FACTOR),
        conf.getBoolean(LRU_IN_MEMORY_FORCE_MODE_CONFIG_NAME, DEFAULT_IN_MEMORY_FORCE_MODE)
        );
  }

  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }
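
  /*
   * Illustrative usage sketch (not part of the class): sizing and creating a
   * cache from a Configuration. The 512 MB capacity and 64 KB block size are
   * hypothetical values chosen for the example, not defaults of this class.
   *
   *   Configuration conf = HBaseConfiguration.create();
   *   long capacity = 512L * 1024 * 1024;   // 512 MB of heap for the cache
   *   long avgBlock = 64L * 1024;           // expected average block size
   *   LruBlockCache cache = new LruBlockCache(capacity, avgBlock, true, conf);
   */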

  /**
   * Configurable constructor. Use this constructor if not using defaults.
   *
   * @param maxSize             maximum size of this cache, in bytes
   * @param blockSize           expected average size of blocks, in bytes
   * @param evictionThread      whether to run evictions in a background thread
   * @param mapInitialSize      initial size of the backing concurrent map
   * @param mapLoadFactor       load factor of the backing concurrent map
   * @param mapConcurrencyLevel concurrency level of the backing concurrent map
   * @param minFactor           percentage of total size that eviction evicts until
   * @param acceptableFactor    percentage of total size that triggers eviction
   * @param singleFactor        percentage of total size for single-access blocks
   * @param multiFactor         percentage of total size for multiple-access blocks
   * @param memoryFactor        percentage of total size for in-memory blocks
   * @param forceInMemory       whether in-memory blocks may use the whole cache
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
      int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
      float minFactor, float acceptableFactor, float singleFactor,
      float multiFactor, float memoryFactor, boolean forceInMemory) {
    if (singleFactor + multiFactor + memoryFactor != 1 ||
        singleFactor < 0 || multiFactor < 0 || memoryFactor < 0) {
      throw new IllegalArgumentException("Single, multi, and memory factors " +
          "should be non-negative and total 1.0");
    }
    if (minFactor >= acceptableFactor) {
      throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
    }
    if (minFactor >= 1.0f || acceptableFactor >= 1.0f) {
      throw new IllegalArgumentException("all factors must be < 1");
    }
    this.maxSize = maxSize;
    this.blockSize = blockSize;
    this.forceInMemory = forceInMemory;
    map = new ConcurrentHashMap<BlockCacheKey,CachedBlock>(mapInitialSize,
        mapLoadFactor, mapConcurrencyLevel);
    this.minFactor = minFactor;
    this.acceptableFactor = acceptableFactor;
    this.singleFactor = singleFactor;
    this.multiFactor = multiFactor;
    this.memoryFactor = memoryFactor;
    this.stats = new CacheStats();
    this.count = new AtomicLong(0);
    this.elements = new AtomicLong(0);
    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
    this.size = new AtomicLong(this.overhead);
    if (evictionThread) {
      this.evictionThread = new EvictionThread(this);
      this.evictionThread.start();
    } else {
      this.evictionThread = null;
    }
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }

  public void setMaxSize(long maxSize) {
    this.maxSize = maxSize;
    if (this.size.get() > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }

  /**
   * Cache the block with the specified name and buffer.
   * <p>
   * It is assumed this will NEVER be called on an already cached block. In
   * rare cases (HBASE-8547) it can happen; if the contents match it is logged
   * and ignored, otherwise it is an error.
   *
   * @param cacheKey block's cache key
   * @param buf      block buffer
   * @param inMemory if block is in-memory
   */
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
    CachedBlock cb = map.get(cacheKey);
    if (cb != null) {
      // compare the contents; if they differ, something is badly wrong
      if (compare(buf, cb.getBuffer()) != 0) {
        throw new RuntimeException("Cached block contents differ, which should not have happened. "
            + "cacheKey:" + cacheKey);
      }
      String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
      msg += ". This is harmless and can happen in rare cases (see HBASE-8547)";
      LOG.warn(msg);
      return;
    }
    cb = new CachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
    long newSize = updateSizeMetrics(cb, false);
    map.put(cacheKey, cb);
    elements.incrementAndGet();
    if (newSize > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }
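
  /*
   * Sketch of the caching round trip described above (illustrative only; the
   * key construction assumes a BlockCacheKey(hfileName, offset) constructor
   * and the file name is hypothetical):
   *
   *   BlockCacheKey key = new BlockCacheKey("hfile-1234", 0L);
   *   cache.cacheBlock(key, block);          // enters at SINGLE priority
   *   cache.cacheBlock(key, block);          // same contents: warned, no-op
   *   Cacheable hit = cache.getBlock(key, true, false); // promotes to MULTI
   */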

  /** Serialize both blocks and compare their raw bytes. */
  private int compare(Cacheable left, Cacheable right) {
    ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
    left.serialize(l);
    ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
    right.serialize(r);
    return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
        r.array(), r.arrayOffset(), r.limit());
  }

  /**
   * Cache the block with the specified name and buffer, with single-access
   * (non in-memory) priority.
   * <p>
   * It is assumed this will NEVER be called on an already cached block.
   *
   * @param cacheKey block's cache key
   * @param buf      block buffer
   */
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
    cacheBlock(cacheKey, buf, false);
  }

  /**
   * Helper function that updates the local size counter by the block's heap
   * size, negating the delta when the block is being evicted.
   *
   * @param cb    the block being cached or evicted
   * @param evict true if the block is being evicted, false if being cached
   * @return the new total cache size, in bytes
   */
  protected long updateSizeMetrics(CachedBlock cb, boolean evict) {
    long heapsize = cb.heapSize();
    if (evict) {
      heapsize *= -1;
    }
    return size.addAndGet(heapsize);
  }

  /**
   * Get the buffer of the block with the specified cache key.
   *
   * @param cacheKey block's cache key
   * @param caching  true if the caller caches blocks on cache misses
   * @param repeat   whether this is a repeat lookup for the same block
   *                 (used to avoid double counting cache misses)
   * @return buffer of specified cache key, or null if not in cache
   */
  @Override
  public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat) {
    CachedBlock cb = map.get(cacheKey);
    if (cb == null) {
      if (!repeat) stats.miss(caching);
      if (victimHandler != null) {
        return victimHandler.getBlock(cacheKey, caching, repeat);
      }
      return null;
    }
    stats.hit(caching);
    cb.access(count.incrementAndGet());
    return cb.getBuffer();
  }
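
  /*
   * Read-path sketch (illustrative): a miss that is not a repeat is counted
   * against the stats and, when an L2 victim cache has been configured via
   * setVictimCache(...), the lookup falls through to it transparently:
   *
   *   Cacheable block = cache.getBlock(key, true, false);
   *   if (block == null) {
   *     // neither this cache nor the victim cache had it; read from HDFS
   *   }
   */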

  /**
   * Whether the cache contains a block with the specified cache key.
   *
   * @return true if contained by the cache
   */
  public boolean containsBlock(BlockCacheKey cacheKey) {
    return map.containsKey(cacheKey);
  }

  @Override
  public boolean evictBlock(BlockCacheKey cacheKey) {
    CachedBlock cb = map.get(cacheKey);
    if (cb == null) return false;
    evictBlock(cb, false);
    return true;
  }

  /**
   * Evicts all blocks for a specific HFile. This is an expensive operation
   * implemented as a linear-time search through all blocks in the cache.
   * <p>
   * Used for evict-on-close to remove all blocks of a specific HFile; the
   * call is also forwarded to the victim cache when one is configured.
   *
   * @return the number of blocks evicted
   */
  @Override
  public int evictBlocksByHfileName(String hfileName) {
    int numEvicted = 0;
    for (BlockCacheKey key : map.keySet()) {
      if (key.getHfileName().equals(hfileName)) {
        if (evictBlock(key))
          ++numEvicted;
      }
    }
    if (victimHandler != null) {
      numEvicted += victimHandler.evictBlocksByHfileName(hfileName);
    }
    return numEvicted;
  }
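
  /*
   * Typical use (illustrative; the file name below is hypothetical): when a
   * store file disappears, e.g. after a compaction, all of its blocks can be
   * purged in one call:
   *
   *   int purged = cache.evictBlocksByHfileName("1a2b3c4d5e6f");
   */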

  /**
   * Evict the given block from the cache.
   *
   * @param block                    the block to evict
   * @param evictedByEvictionProcess true if evicted by the eviction
   *          algorithm (as opposed to an explicit eviction); only in this
   *          case is the block forwarded to the victim cache
   * @return the heap size of the evicted block, in bytes
   */
  protected long evictBlock(CachedBlock block, boolean evictedByEvictionProcess) {
    map.remove(block.getCacheKey());
    updateSizeMetrics(block, true);
    elements.decrementAndGet();
    stats.evicted();
    if (evictedByEvictionProcess && victimHandler != null) {
      boolean wait = getCurrentSize() < acceptableSize();
      boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
      victimHandler.cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
          inMemory, wait);
    }
    return block.heapSize();
  }

  /**
   * Multi-threaded call to run the eviction process.
   */
  private void runEviction() {
    if (evictionThread == null) {
      evict();
    } else {
      evictionThread.evict();
    }
  }

  /**
   * Eviction method. Frees blocks until the cache size drops below the
   * minimum size.
   */
  void evict() {

    // Ensure only one eviction runs at a time
    if (!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      long bytesToFree = currentSize - minSize();

      if (LOG.isTraceEnabled()) {
        LOG.trace("Block cache LRU eviction started; Attempting to free " +
            StringUtils.byteDesc(bytesToFree) + " of total=" +
            StringUtils.byteDesc(currentSize));
      }

      if (bytesToFree <= 0) return;

      // Instantiate priority buckets
      BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize,
          singleSize());
      BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize,
          multiSize());
      BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize,
          memorySize());

      // Scan entire map putting each block into the appropriate bucket
      for (CachedBlock cachedBlock : map.values()) {
        switch (cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      long bytesFreed = 0;
      if (forceInMemory || memoryFactor > 0.999f) {
        long s = bucketSingle.totalSize();
        long m = bucketMulti.totalSize();
        if (bytesToFree > (s + m)) {
          // this means we need to evict blocks in the memory bucket to make
          // room, so the single and multi buckets are emptied first
          bytesFreed = bucketSingle.free(s);
          bytesFreed += bucketMulti.free(m);
          bytesFreed += bucketMemory.free(bytesToFree - bytesFreed);
        } else {
          // this means there is no need to evict blocks in the memory
          // bucket; we try to keep the ratio between the single and multi
          // buckets at about 1:2 of what remains
          long bytesRemain = s + m - bytesToFree;
          if (3 * s <= bytesRemain) {
            // the single bucket is already small enough that no eviction
            // happens for it, so everything is freed from the multi bucket
            bytesFreed = bucketMulti.free(bytesToFree);
          } else if (3 * m <= 2 * bytesRemain) {
            // the multi bucket is already small enough that no eviction
            // happens for it, so everything is freed from the single bucket
            bytesFreed = bucketSingle.free(bytesToFree);
          } else {
            // both buckets need to evict some blocks
            bytesFreed = bucketSingle.free(s - bytesRemain / 3);
            if (bytesFreed < bytesToFree) {
              bytesFreed += bucketMulti.free(bytesToFree - bytesFreed);
            }
          }
        }
      } else {
        PriorityQueue<BlockBucket> bucketQueue =
            new PriorityQueue<BlockBucket>(3);

        bucketQueue.add(bucketSingle);
        bucketQueue.add(bucketMulti);
        bucketQueue.add(bucketMemory);

        int remainingBuckets = 3;

        // Free from the buckets in increasing order of overflow, splitting
        // the outstanding bytes evenly among the remaining buckets
        BlockBucket bucket;
        while ((bucket = bucketQueue.poll()) != null) {
          long overflow = bucket.overflow();
          if (overflow > 0) {
            long bucketBytesToFree = Math.min(overflow,
                (bytesToFree - bytesFreed) / remainingBuckets);
            bytesFreed += bucket.free(bucketBytesToFree);
          }
          remainingBuckets--;
        }
      }

      if (LOG.isTraceEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.trace("Block cache LRU eviction completed; " +
            "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
            "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
            "single=" + StringUtils.byteDesc(single) + ", " +
            "multi=" + StringUtils.byteDesc(multi) + ", " +
            "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
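
  /*
   * Worked example of the non-forced branch above, with hypothetical numbers
   * and ignoring block-size granularity in free(): suppose bytesToFree is
   * 30 MB and the buckets exceed their chunks by single = 10 MB,
   * multi = 40 MB, memory = 0. The priority queue yields buckets in
   * increasing order of overflow:
   *
   *   memory: overflow 0     -> nothing freed,              remainingBuckets = 2
   *   single: overflow 10 MB -> frees min(10, 30/2) = 10 MB, remainingBuckets = 1
   *   multi:  overflow 40 MB -> frees min(40, (30-10)/1) = 20 MB
   *
   * Total freed = 30 MB = bytesToFree, taken mostly from the bucket that is
   * furthest over its fair share.
   */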

  /**
   * Used to group blocks into priority buckets. There will be a BlockBucket
   * for each priority (single, multi, memory). Once bucketed, the eviction
   * algorithm takes the appropriate number of elements out of each according
   * to configuration parameters and their relative sizes. Buckets compare by
   * how far they overflow their configured chunk size.
   */
  private class BlockBucket implements Comparable<BlockBucket> {

    private CachedBlockQueue queue;
    private long totalSize = 0;
    private long bucketSize;

    public BlockBucket(long bytesToFree, long blockSize, long bucketSize) {
      this.bucketSize = bucketSize;
      queue = new CachedBlockQueue(bytesToFree, blockSize);
      totalSize = 0;
    }

    public void add(CachedBlock block) {
      totalSize += block.heapSize();
      queue.add(block);
    }

    public long free(long toFree) {
      CachedBlock cb;
      long freedBytes = 0;
      while ((cb = queue.pollLast()) != null) {
        freedBytes += evictBlock(cb, true);
        if (freedBytes >= toFree) {
          return freedBytes;
        }
      }
      return freedBytes;
    }

    public long overflow() {
      return totalSize - bucketSize;
    }

    public long totalSize() {
      return totalSize;
    }

    @Override
    public int compareTo(BlockBucket that) {
      if (this.overflow() == that.overflow()) return 0;
      return this.overflow() > that.overflow() ? 1 : -1;
    }

    @Override
    public boolean equals(Object that) {
      if (!(that instanceof BlockBucket)) {
        return false;
      }
      return compareTo((BlockBucket) that) == 0;
    }

    @Override
    public int hashCode() {
      // keep consistent with equals(), which compares by overflow()
      return Long.valueOf(overflow()).hashCode();
    }
  }

  /**
   * Get the maximum size of this cache.
   *
   * @return max size, in bytes
   */
  public long getMaxSize() {
    return this.maxSize;
  }

  @Override
  public long getCurrentSize() {
    return this.size.get();
  }

  @Override
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  @Override
  public long size() {
    return this.elements.get();
  }

  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  /**
   * Get the number of eviction runs that have occurred.
   */
  public long getEvictionCount() {
    return this.stats.getEvictionCount();
  }

  @Override
  public long getEvictedCount() {
    return this.stats.getEvictedCount();
  }

  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }

  /**
   * Eviction thread. Sits in a waiting state until an eviction is triggered
   * when the cache size grows above the acceptable level.
   *
   * <p>Thread is triggered into action by {@link LruBlockCache#runEviction()}.
   */
  static class EvictionThread extends HasThread {
    private WeakReference<LruBlockCache> cache;
    private boolean go = true;

    // flag set once run() has been entered; used by tests
    private boolean enteringRun = false;

    public EvictionThread(LruBlockCache cache) {
      super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
      setDaemon(true);
      this.cache = new WeakReference<LruBlockCache>(cache);
    }

    @Override
    public void run() {
      enteringRun = true;
      while (this.go) {
        synchronized (this) {
          try {
            this.wait();
          } catch (InterruptedException e) {
            // ignored; loop back and wait again unless shut down
          }
        }
        LruBlockCache cache = this.cache.get();
        if (cache == null) break;
        cache.evict();
      }
    }

    public void evict() {
      synchronized (this) {
        this.notifyAll();
      }
    }

    synchronized void shutdown() {
      this.go = false;
      this.notifyAll();
    }

    /**
     * Used for tests.
     */
    boolean isEnteringRun() {
      return this.enteringRun;
    }
  }
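
  /*
   * Wake-up protocol sketch: the cache never blocks on eviction, it only
   * nudges the thread out of wait() via a monitor notify:
   *
   *   cache.runEviction()            // from cacheBlock()/setMaxSize()
   *     -> evictionThread.evict()    //   synchronized notifyAll()
   *       -> run() wakes from wait() //   and calls cache.evict()
   *
   * The thread holds only a WeakReference to the cache, so an otherwise
   * unreferenced cache can still be garbage collected, at which point the
   * loop observes null and exits.
   */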

  /**
   * Statistics thread. Periodically prints the cache statistics to the log.
   */
  static class StatisticsThread extends Thread {
    LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCache.StatisticsThread");
      setDaemon(true);
      this.lru = lru;
    }

    @Override
    public void run() {
      lru.logStats();
    }
  }

  public void logStats() {
    if (!LOG.isDebugEnabled()) return;

    long totalSize = heapSize();
    long freeSize = maxSize - totalSize;
    LruBlockCache.LOG.debug("Total=" + StringUtils.byteDesc(totalSize) + ", " +
        "free=" + StringUtils.byteDesc(freeSize) + ", " +
        "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
        "blocks=" + size() + ", " +
        "accesses=" + stats.getRequestCount() + ", " +
        "hits=" + stats.getHitCount() + ", " +
        "hitRatio=" +
        (stats.getHitCount() == 0 ? "0" : StringUtils.formatPercent(stats.getHitRatio(), 2)) + ", " +
        "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
        "cachingHits=" + stats.getHitCachingCount() + ", " +
        "cachingHitsRatio=" +
        (stats.getHitCachingCount() == 0 ? "0" : StringUtils.formatPercent(stats.getHitCachingRatio(), 2)) + ", " +
        "evictions=" + stats.getEvictionCount() + ", " +
        "evicted=" + stats.getEvictedCount() + ", " +
        "evictedPerRun=" + stats.evictedPerEviction());
  }

  /**
   * Get counter statistics for this cache.
   *
   * <p>Includes: total accesses, hits, misses, evicted blocks, and runs
   * of the eviction processes.
   */
  public CacheStats getStats() {
    return this.stats;
  }

  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (9 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
      + ClassSize.OBJECT);

  // HeapSize implementation
  @Override
  public long heapSize() {
    return getCurrentSize();
  }

  public static long calculateOverhead(long maxSize, long blockSize, int concurrency) {
    // fixed overhead, plus the presized map and its segments
    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
        ((long)Math.ceil(maxSize*1.2/blockSize)
            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        ((long)concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
  }
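
  /*
   * Worked example with hypothetical numbers: for maxSize = 1 GB, blockSize =
   * 64 KB and the default concurrency level of 16, the map is presized for
   * ceil(2^30 * 1.2 / 2^16) = 19,661 entries, so the estimate is
   *
   *   CACHE_FIXED_OVERHEAD + CONCURRENT_HASHMAP
   *       + 19,661 * CONCURRENT_HASHMAP_ENTRY
   *       + 16 * CONCURRENT_HASHMAP_SEGMENT
   *
   * i.e. the per-entry cost of the backing map dominates.
   */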

  @Override
  public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(
      Configuration conf) throws IOException {

    Map<String, Path> sfMap = FSUtils.getTableStoreFilePathMap(
        FileSystem.get(conf),
        FSUtils.getRootDir(conf));

    // quirky, but the summary is a compound key and this map-to-itself is a
    // shortcut taken instead of creating a class representing only the key
    Map<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary> bcs =
        new HashMap<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary>();

    for (CachedBlock cb : map.values()) {
      String sf = cb.getCacheKey().getHfileName();
      Path path = sfMap.get(sf);
      if (path != null) {
        BlockCacheColumnFamilySummary lookup =
            BlockCacheColumnFamilySummary.createFromStoreFilePath(path);
        BlockCacheColumnFamilySummary bcse = bcs.get(lookup);
        if (bcse == null) {
          bcse = BlockCacheColumnFamilySummary.create(lookup);
          bcs.put(lookup, bcse);
        }
        bcse.incrementBlocks();
        bcse.incrementHeapSize(cb.heapSize());
      }
    }
    List<BlockCacheColumnFamilySummary> list =
        new ArrayList<BlockCacheColumnFamilySummary>(bcs.values());
    Collections.sort(list);
    return list;
  }

  // Simple calculators of sizes given factors and maxSize

  private long acceptableSize() {
    return (long)Math.floor(this.maxSize * this.acceptableFactor);
  }
  private long minSize() {
    return (long)Math.floor(this.maxSize * this.minFactor);
  }
  private long singleSize() {
    return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  private long multiSize() {
    return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  private long memorySize() {
    return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }
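
  /*
   * With the defaults (min=0.95, acceptable=0.99, single=0.25, multi=0.50,
   * memory=0.25) and a hypothetical 1,000 MB cache, the thresholds and
   * per-priority chunks work out to:
   *
   *   acceptableSize = 1000 * 0.99        = 990 MB   (eviction trigger)
   *   minSize        = 1000 * 0.95        = 950 MB   (eviction target)
   *   singleSize     = 1000 * 0.25 * 0.95 = 237.5 MB
   *   multiSize      = 1000 * 0.50 * 0.95 = 475 MB
   *   memorySize     = 1000 * 0.25 * 0.95 = 237.5 MB
   */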

  public void shutdown() {
    if (victimHandler != null) {
      victimHandler.shutdown();
    }
    this.scheduleThreadPool.shutdown();
    for (int i = 0; i < 10; i++) {
      if (!this.scheduleThreadPool.isShutdown()) Threads.sleep(10);
    }
    if (!this.scheduleThreadPool.isShutdown()) {
      List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
      LOG.debug("Still running " + runnables);
    }
    if (this.evictionThread != null) {
      this.evictionThread.shutdown();
    }
  }

  /** Clears the cache. Used in tests. */
  public void clearCache() {
    map.clear();
  }

  /**
   * Used in testing. May be very inefficient.
   *
   * @return the set of cached file names
   */
  SortedSet<String> getCachedFileNamesForTest() {
    SortedSet<String> fileNames = new TreeSet<String>();
    for (BlockCacheKey cacheKey : map.keySet()) {
      fileNames.add(cacheKey.getHfileName());
    }
    return fileNames;
  }

  Map<BlockType, Integer> getBlockTypeCountsForTest() {
    Map<BlockType, Integer> counts =
        new EnumMap<BlockType, Integer>(BlockType.class);
    for (CachedBlock cb : map.values()) {
      BlockType blockType = ((HFileBlock) cb.getBuffer()).getBlockType();
      Integer count = counts.get(blockType);
      counts.put(blockType, (count == null ? 0 : count) + 1);
    }
    return counts;
  }

  public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
    Map<DataBlockEncoding, Integer> counts =
        new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
    for (BlockCacheKey cacheKey : map.keySet()) {
      DataBlockEncoding encoding = cacheKey.getDataBlockEncoding();
      Integer count = counts.get(encoding);
      counts.put(encoding, (count == null ? 0 : count) + 1);
    }
    return counts;
  }

  public void setVictimCache(BucketCache handler) {
    assert victimHandler == null;
    victimHandler = handler;
  }
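
  /*
   * Illustrative wiring of the victim handler (the construction of the
   * BucketCache itself is elided; any configured instance would do): once
   * set, blocks evicted by the eviction process spill into the L2 cache and
   * getBlock() misses fall through to it.
   *
   *   BucketCache l2 = ...; // e.g. an offheap or file-backed BucketCache
   *   cache.setVictimCache(l2);
   */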

}