1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20 package org.apache.hadoop.hbase.io.hfile;
21
22 import java.io.IOException;
23 import java.lang.ref.WeakReference;
24 import java.util.ArrayList;
25 import java.util.Collections;
26 import java.util.EnumMap;
27 import java.util.HashMap;
28 import java.util.List;
29 import java.util.Map;
30 import java.util.PriorityQueue;
31 import java.util.SortedSet;
32 import java.util.TreeSet;
33 import java.util.concurrent.ConcurrentHashMap;
34 import java.util.concurrent.Executors;
35 import java.util.concurrent.ScheduledExecutorService;
36 import java.util.concurrent.TimeUnit;
37 import java.util.concurrent.atomic.AtomicLong;
38 import java.util.concurrent.locks.ReentrantLock;
39
40 import org.apache.commons.logging.Log;
41 import org.apache.commons.logging.LogFactory;
42 import org.apache.hadoop.conf.Configuration;
43 import org.apache.hadoop.fs.FileSystem;
44 import org.apache.hadoop.fs.Path;
45 import org.apache.hadoop.hbase.io.HeapSize;
46 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
47 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
48 import org.apache.hadoop.hbase.util.Bytes;
49 import org.apache.hadoop.hbase.util.ClassSize;
50 import org.apache.hadoop.hbase.util.FSUtils;
51 import org.apache.hadoop.hbase.util.HasThread;
52 import org.apache.hadoop.hbase.util.Threads;
53 import org.apache.hadoop.util.StringUtils;
54
55 import com.google.common.util.concurrent.ThreadFactoryBuilder;
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95 public class LruBlockCache implements BlockCache, HeapSize {
96
  static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  /** Configuration key for the eviction low-water mark (fraction of max size). */
  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
  /** Configuration key for the size threshold that triggers eviction (fraction of max size). */
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";

  /** Defaults for the backing ConcurrentHashMap (load factor and concurrency level). */
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;

  /** Eviction frees down to minFactor * maxSize once usage exceeds acceptableFactor * maxSize. */
  static final float DEFAULT_MIN_FACTOR = 0.75f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.85f;

  /** Fractions of the cache reserved for single-access, multi-access and in-memory
   *  priority blocks; the three must sum to 1.0 (enforced in the constructor). */
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  /** Period, in seconds, between statistics log lines (5 minutes). */
  static final int statThreadPeriod = 60 * 5;

  /** Backing store of cached blocks, keyed by block cache key. */
  private final ConcurrentHashMap<BlockCacheKey,CachedBlock> map;

  /** Fair lock ensuring a single eviction runs at a time. */
  private final ReentrantLock evictionLock = new ReentrantLock(true);

  /** volatile flag read by cacheBlock/setMaxSize to avoid kicking off a redundant eviction. */
  private volatile boolean evictionInProgress = false;

  /** Background eviction thread; null when eviction runs inline on the caller. */
  private final EvictionThread evictionThread;

  /** Single-threaded scheduler that periodically logs cache statistics. */
  private final ScheduledExecutorService scheduleThreadPool =
    Executors.newScheduledThreadPool(1,
      new ThreadFactoryBuilder()
        .setNameFormat("LRU Statistics #%d")
        .setDaemon(true)
        .build());

  /** Accounted heap usage in bytes, including the fixed cache overhead. */
  private final AtomicLong size;

  /** Number of blocks currently in the cache. */
  private final AtomicLong elements;

  /** Monotonic access counter used to order blocks for LRU eviction. */
  private final AtomicLong count;

  /** Hit/miss/eviction statistics. */
  private final CacheStats stats;

  /** Maximum allowed heap size of the cache, in bytes (mutable via setMaxSize). */
  private long maxSize;

  /** Approximate block size used to size the map and estimate overhead. */
  private long blockSize;

  /** Eviction trigger threshold, as a fraction of maxSize. */
  private float acceptableFactor;

  /** Eviction target (low-water mark), as a fraction of maxSize. */
  private float minFactor;

  /** Fraction of the cache reserved for single-access blocks. */
  private float singleFactor;

  /** Fraction of the cache reserved for multi-access blocks. */
  private float multiFactor;

  /** Fraction of the cache reserved for in-memory blocks. */
  private float memoryFactor;

  /** Precomputed fixed overhead of this cache instance, in bytes. */
  private long overhead;
175
176
177
178
179
180
181
182
183
184
185
  /**
   * Constructs a cache with a background eviction thread and all default factors.
   *
   * @param maxSize   maximum heap size, in bytes, the cache may use
   * @param blockSize expected (approximate) size of a single block, in bytes
   * @param conf      configuration supplying min/acceptable factor overrides
   */
  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }

  /**
   * Constructs a cache with default map sizing and priority factors; min and
   * acceptable factors may be overridden via configuration.
   *
   * @param evictionThread true to evict on a dedicated background thread,
   *                       false to evict inline on the caching thread
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        // Size the map for ~1.2x the theoretical block capacity to limit rehashing.
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        DEFAULT_SINGLE_FACTOR,
        DEFAULT_MULTI_FACTOR,
        DEFAULT_MEMORY_FACTOR);
  }
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
  /**
   * Fully-parameterized constructor. Validates the factor invariants, builds the
   * backing map, optionally starts the eviction thread, and schedules periodic
   * statistics logging.
   *
   * @param maxSize             maximum heap size, in bytes, the cache may use
   * @param blockSize           expected size of a single block, in bytes
   * @param evictionThread      true to run eviction on a background thread
   * @param mapInitialSize      initial capacity of the backing map
   * @param mapLoadFactor       load factor of the backing map
   * @param mapConcurrencyLevel concurrency level of the backing map
   * @param minFactor           eviction low-water mark (fraction of maxSize)
   * @param acceptableFactor    eviction trigger threshold (fraction of maxSize)
   * @param singleFactor        share reserved for single-access blocks
   * @param multiFactor         share reserved for multi-access blocks
   * @param memoryFactor        share reserved for in-memory blocks
   * @throws IllegalArgumentException if the priority factors do not sum to 1,
   *         or minFactor/acceptableFactor are not each below 1 with
   *         minFactor strictly below acceptableFactor
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
      int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
      float minFactor, float acceptableFactor,
      float singleFactor, float multiFactor, float memoryFactor) {
    if(singleFactor + multiFactor + memoryFactor != 1) {
      throw new IllegalArgumentException("Single, multi, and memory factors " +
          " should total 1.0");
    }
    if(minFactor >= acceptableFactor) {
      throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
    }
    if(minFactor >= 1.0f || acceptableFactor >= 1.0f) {
      throw new IllegalArgumentException("all factors must be < 1");
    }
    this.maxSize = maxSize;
    this.blockSize = blockSize;
    map = new ConcurrentHashMap<BlockCacheKey,CachedBlock>(mapInitialSize,
        mapLoadFactor, mapConcurrencyLevel);
    this.minFactor = minFactor;
    this.acceptableFactor = acceptableFactor;
    this.singleFactor = singleFactor;
    this.multiFactor = multiFactor;
    this.memoryFactor = memoryFactor;
    this.stats = new CacheStats();
    this.count = new AtomicLong(0);
    this.elements = new AtomicLong(0);
    this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
    // The accounted size starts at the fixed overhead, not zero.
    this.size = new AtomicLong(this.overhead);
    if(evictionThread) {
      this.evictionThread = new EvictionThread(this);
      this.evictionThread.start();
    } else {
      this.evictionThread = null;
    }
    this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
        statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
  }
257
  /**
   * Updates the maximum cache size and, if current usage now exceeds the
   * acceptable threshold, kicks off an eviction (unless one is already running).
   *
   * @param maxSize new maximum heap size, in bytes
   */
  public void setMaxSize(long maxSize) {
    this.maxSize = maxSize;
    if(this.size.get() > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }
264
265
266
267
268
269
270
271
272
273
274
275
  /**
   * Caches the given block under the given key, updating size accounting and
   * triggering eviction when the acceptable threshold is exceeded.
   *
   * @param cacheKey key the block is cached under
   * @param buf      block contents
   * @param inMemory true to cache with in-memory priority
   * @throws RuntimeException if a block is already cached under this key
   */
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
    CachedBlock cb = map.get(cacheKey);
    if(cb != null) {
      throw new RuntimeException("Cached an already cached block");
    }
    cb = new CachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
    // Account the heap usage before publishing the entry in the map.
    long newSize = updateSizeMetrics(cb, false);
    map.put(cacheKey, cb);
    elements.incrementAndGet();
    // NOTE(review): the get-then-put above is not atomic; two threads caching the
    // same key concurrently could both pass the duplicate check — confirm callers
    // serialize caching per key.
    if(newSize > acceptableSize() && !evictionInProgress) {
      runEviction();
    }
  }

  /**
   * Caches the given block with default (non-in-memory) priority.
   *
   * @param cacheKey key the block is cached under
   * @param buf      block contents
   */
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
    cacheBlock(cacheKey, buf, false);
  }
303
304
305
306
307
308
309
310
311
312 protected long updateSizeMetrics(CachedBlock cb, boolean evict) {
313 long heapsize = cb.heapSize();
314 if (evict) {
315 heapsize *= -1;
316 }
317 Cacheable cachedBlock = cb.getBuffer();
318 SchemaMetrics schemaMetrics = cachedBlock.getSchemaMetrics();
319 if (schemaMetrics != null) {
320 schemaMetrics.updateOnCachePutOrEvict(
321 cachedBlock.getBlockType().getCategory(), heapsize, evict);
322 }
323 return size.addAndGet(heapsize);
324 }
325
326
327
328
329
330
331
332
333
334
335 @Override
336 public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat) {
337 CachedBlock cb = map.get(cacheKey);
338 if(cb == null) {
339 if (!repeat) stats.miss(caching);
340 return null;
341 }
342 stats.hit(caching);
343 cb.access(count.incrementAndGet());
344 return cb.getBuffer();
345 }
346
347
348 @Override
349 public boolean evictBlock(BlockCacheKey cacheKey) {
350 CachedBlock cb = map.get(cacheKey);
351 if (cb == null) return false;
352 evictBlock(cb);
353 return true;
354 }
355
356
357
358
359
360
361
362
363
364
365
366 @Override
367 public int evictBlocksByHfileName(String hfileName) {
368 int numEvicted = 0;
369 for (BlockCacheKey key : map.keySet()) {
370 if (key.getHfileName().equals(hfileName)) {
371 if (evictBlock(key))
372 ++numEvicted;
373 }
374 }
375 return numEvicted;
376 }
377
378 protected long evictBlock(CachedBlock block) {
379 map.remove(block.getCacheKey());
380 updateSizeMetrics(block, true);
381 elements.decrementAndGet();
382 stats.evicted();
383 return block.heapSize();
384 }
385
386
387
388
389 private void runEviction() {
390 if(evictionThread == null) {
391 evict();
392 } else {
393 evictionThread.evict();
394 }
395 }
396
397
398
399
  /**
   * Performs one eviction pass: frees blocks until usage drops to the
   * min-size low-water mark, distributing the work across the single-access,
   * multi-access and in-memory priority buckets, least-overflowing bucket first.
   * Only one pass runs at a time; a pass that cannot acquire the lock returns
   * immediately.
   */
  void evict() {

    // Non-blocking: if an eviction is already running, bail out.
    if(!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      // Free down to the low-water mark, not just below the acceptable threshold.
      long bytesToFree = currentSize - minSize();

      if (LOG.isDebugEnabled()) {
        LOG.debug("Block cache LRU eviction started; Attempting to free " +
          StringUtils.byteDesc(bytesToFree) + " of total=" +
          StringUtils.byteDesc(currentSize));
      }

      if(bytesToFree <= 0) return;

      // One bucket per priority, each bounded by its configured share of the cache.
      BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize,
          singleSize());
      BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize,
          multiSize());
      BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize,
          memorySize());

      // Partition all cached blocks into their priority buckets.
      for(CachedBlock cachedBlock : map.values()) {
        switch(cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      // Process buckets in ascending overflow order so the least-overflowing
      // bucket frees first and the remainder is split among the rest.
      PriorityQueue<BlockBucket> bucketQueue =
        new PriorityQueue<BlockBucket>(3);

      bucketQueue.add(bucketSingle);
      bucketQueue.add(bucketMulti);
      bucketQueue.add(bucketMemory);

      int remainingBuckets = 3;
      long bytesFreed = 0;

      BlockBucket bucket;
      while((bucket = bucketQueue.poll()) != null) {
        long overflow = bucket.overflow();
        if(overflow > 0) {
          // Each bucket frees at most its overflow and at most an even share
          // of what is still left to free.
          long bucketBytesToFree = Math.min(overflow,
            (bytesToFree - bytesFreed) / remainingBuckets);
          bytesFreed += bucket.free(bucketBytesToFree);
        }
        remainingBuckets--;
      }

      if (LOG.isDebugEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.debug("Block cache LRU eviction completed; " +
          "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
          "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
          "single=" + StringUtils.byteDesc(single) + ", " +
          "multi=" + StringUtils.byteDesc(multi) + ", " +
          "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
482
483
484
485
486
487
488
489 private class BlockBucket implements Comparable<BlockBucket> {
490
491 private CachedBlockQueue queue;
492 private long totalSize = 0;
493 private long bucketSize;
494
495 public BlockBucket(long bytesToFree, long blockSize, long bucketSize) {
496 this.bucketSize = bucketSize;
497 queue = new CachedBlockQueue(bytesToFree, blockSize);
498 totalSize = 0;
499 }
500
501 public void add(CachedBlock block) {
502 totalSize += block.heapSize();
503 queue.add(block);
504 }
505
506 public long free(long toFree) {
507 CachedBlock cb;
508 long freedBytes = 0;
509 while ((cb = queue.pollLast()) != null) {
510 freedBytes += evictBlock(cb);
511 if (freedBytes >= toFree) {
512 return freedBytes;
513 }
514 }
515 return freedBytes;
516 }
517
518 public long overflow() {
519 return totalSize - bucketSize;
520 }
521
522 public long totalSize() {
523 return totalSize;
524 }
525
526 public int compareTo(BlockBucket that) {
527 if(this.overflow() == that.overflow()) return 0;
528 return this.overflow() > that.overflow() ? 1 : -1;
529 }
530 }
531
532
533
534
535
  /** @return the configured maximum heap size of this cache, in bytes. */
  public long getMaxSize() {
    return this.maxSize;
  }

  /** @return current accounted heap usage in bytes (includes the fixed overhead). */
  public long getCurrentSize() {
    return this.size.get();
  }

  /** @return bytes remaining before the cache reaches its maximum size. */
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  /** @return the number of blocks currently cached. */
  public long size() {
    return this.elements.get();
  }

  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  /** @return the number of eviction passes that have run. */
  public long getEvictionCount() {
    return this.stats.getEvictionCount();
  }

  /** @return the total number of blocks evicted across all passes. */
  public long getEvictedCount() {
    return this.stats.getEvictedCount();
  }

  /** @return the background eviction thread, or null when eviction runs inline. */
  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }
587
588
589
590
591
592
593
594 static class EvictionThread extends HasThread {
595 private WeakReference<LruBlockCache> cache;
596 private boolean go = true;
597
598 private boolean enteringRun = false;
599
600 public EvictionThread(LruBlockCache cache) {
601 super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
602 setDaemon(true);
603 this.cache = new WeakReference<LruBlockCache>(cache);
604 }
605
606 @Override
607 public void run() {
608 enteringRun = true;
609 while (this.go) {
610 synchronized(this) {
611 try {
612 this.wait();
613 } catch(InterruptedException e) {}
614 }
615 LruBlockCache cache = this.cache.get();
616 if(cache == null) break;
617 cache.evict();
618 }
619 }
620
621 public void evict() {
622 synchronized(this) {
623 this.notify();
624 }
625 }
626
627 void shutdown() {
628 this.go = false;
629 interrupt();
630 }
631
632
633
634
635 boolean isEnteringRun() {
636 return this.enteringRun;
637 }
638 }
639
640
641
642
  /**
   * One-shot task (run periodically by the scheduler) that logs the cache's
   * current statistics.
   */
  static class StatisticsThread extends Thread {
    LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCache.StatisticsThread");
      setDaemon(true);
      this.lru = lru;
    }
    @Override
    public void run() {
      lru.logStats();
    }
  }
656
657 public void logStats() {
658 if (!LOG.isDebugEnabled()) return;
659
660 long totalSize = heapSize();
661 long freeSize = maxSize - totalSize;
662 LruBlockCache.LOG.debug("Stats: " +
663 "total=" + StringUtils.byteDesc(totalSize) + ", " +
664 "free=" + StringUtils.byteDesc(freeSize) + ", " +
665 "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
666 "blocks=" + size() +", " +
667 "accesses=" + stats.getRequestCount() + ", " +
668 "hits=" + stats.getHitCount() + ", " +
669 "hitRatio=" +
670 (stats.getHitCount() == 0 ? "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " +
671 "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
672 "cachingHits=" + stats.getHitCachingCount() + ", " +
673 "cachingHitsRatio=" +
674 (stats.getHitCachingCount() == 0 ? "0" : (StringUtils.formatPercent(stats.getHitCachingRatio(), 2)+ ", ")) + ", " +
675 "evictions=" + stats.getEvictionCount() + ", " +
676 "evicted=" + stats.getEvictedCount() + ", " +
677 "evictedPerRun=" + stats.evictedPerEviction());
678 }
679
680
681
682
683
684
685
  /** @return the live statistics object for this cache. */
  public CacheStats getStats() {
    return this.stats;
  }

  // Fixed per-instance overhead: 3 longs (maxSize, blockSize, overhead),
  // 8 references (map, evictionLock, evictionThread, scheduleThreadPool,
  // size, elements, count, stats), 5 floats (the factor fields), and the
  // evictionInProgress boolean, plus object header, aligned.
  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (8 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
      + ClassSize.OBJECT);

  /** @return accounted heap usage (same as {@link #getCurrentSize()}). */
  public long heapSize() {
    return getCurrentSize();
  }

  /**
   * Estimates the cache's bookkeeping overhead: fixed overhead plus the map
   * structure and one entry per expected block (map is sized for ~1.2x the
   * theoretical block capacity, matching the constructor).
   *
   * @param maxSize     maximum cache size, in bytes
   * @param blockSize   expected size of a single block, in bytes
   * @param concurrency concurrency level of the backing map
   * @return estimated overhead, in bytes
   */
  public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
    // FindBugs ICAST_INTEGER_MULTIPLY_CAST_TO_LONG
    return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
        ((long)Math.ceil(maxSize*1.2/blockSize)
            * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
        (concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
  }
707
  /**
   * Builds per-column-family summaries (block count and heap size) of the
   * cache's current contents by resolving each cached block's hfile name to a
   * store file path on the filesystem.
   *
   * @param conf configuration used to reach the filesystem and root dir
   * @return summaries sorted by their natural order
   * @throws IOException if the store file path map cannot be read
   */
  @Override
  public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf) throws IOException {

    Map<String, Path> sfMap = FSUtils.getTableStoreFilePathMap(
        FileSystem.get(conf),
        FSUtils.getRootDir(conf));

    // Aggregate blocks into one summary per (table, column family); the map is
    // keyed by a lookup summary equal to the aggregated one.
    Map<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary> bcs =
      new HashMap<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary>();

    for (CachedBlock cb : map.values()) {
      String sf = cb.getCacheKey().getHfileName();
      Path path = sfMap.get(sf);
      // Blocks whose hfile no longer maps to a store file are skipped.
      if ( path != null) {
        BlockCacheColumnFamilySummary lookup =
          BlockCacheColumnFamilySummary.createFromStoreFilePath(path);
        BlockCacheColumnFamilySummary bcse = bcs.get(lookup);
        if (bcse == null) {
          bcse = BlockCacheColumnFamilySummary.create(lookup);
          bcs.put(lookup,bcse);
        }
        bcse.incrementBlocks();
        bcse.incrementHeapSize(cb.heapSize());
      }
    }
    List<BlockCacheColumnFamilySummary> list =
        new ArrayList<BlockCacheColumnFamilySummary>(bcs.values());
    Collections.sort( list );
    return list;
  }
740
741
742
  /** Size above which a cacheBlock/setMaxSize triggers an eviction pass. */
  private long acceptableSize() {
    return (long)Math.floor(this.maxSize * this.acceptableFactor);
  }
  /** Low-water mark an eviction pass frees down to. */
  private long minSize() {
    return (long)Math.floor(this.maxSize * this.minFactor);
  }
  /** Share of the (post-eviction) cache reserved for single-access blocks. */
  private long singleSize() {
    return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  /** Share of the (post-eviction) cache reserved for multi-access blocks. */
  private long multiSize() {
    return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  /** Share of the (post-eviction) cache reserved for in-memory blocks. */
  private long memorySize() {
    return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }
758
759 public void shutdown() {
760 this.scheduleThreadPool.shutdown();
761 for (int i = 0; i < 10; i++) {
762 if (!this.scheduleThreadPool.isShutdown()) Threads.sleep(10);
763 }
764 if (!this.scheduleThreadPool.isShutdown()) {
765 List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
766 LOG.debug("Still running " + runnables);
767 }
768 this.evictionThread.shutdown();
769 }
770
771
  /**
   * Drops every entry from the backing map.
   * NOTE(review): this does not reset the size/elements accounting or per-schema
   * metrics, so heapSize() keeps reporting the dropped blocks — confirm callers
   * (presumably tests) tolerate that drift.
   */
  public void clearCache() {
    map.clear();
  }
775
776
777
778
779
780 SortedSet<String> getCachedFileNamesForTest() {
781 SortedSet<String> fileNames = new TreeSet<String>();
782 for (BlockCacheKey cacheKey : map.keySet()) {
783 fileNames.add(cacheKey.getHfileName());
784 }
785 return fileNames;
786 }
787
788 Map<BlockType, Integer> getBlockTypeCountsForTest() {
789 Map<BlockType, Integer> counts =
790 new EnumMap<BlockType, Integer>(BlockType.class);
791 for (CachedBlock cb : map.values()) {
792 BlockType blockType = ((HFileBlock) cb.getBuffer()).getBlockType();
793 Integer count = counts.get(blockType);
794 counts.put(blockType, (count == null ? 0 : count) + 1);
795 }
796 return counts;
797 }
798
799 public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
800 Map<DataBlockEncoding, Integer> counts =
801 new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
802 for (BlockCacheKey cacheKey : map.keySet()) {
803 DataBlockEncoding encoding = cacheKey.getDataBlockEncoding();
804 Integer count = counts.get(encoding);
805 counts.put(encoding, (count == null ? 0 : count) + 1);
806 }
807 return counts;
808 }
809
810 }