1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20 package org.apache.hadoop.hbase.io.hfile;
21
22 import java.io.IOException;
23 import java.lang.ref.WeakReference;
24 import java.nio.ByteBuffer;
25 import java.util.ArrayList;
26 import java.util.Collections;
27 import java.util.EnumMap;
28 import java.util.HashMap;
29 import java.util.List;
30 import java.util.Map;
31 import java.util.PriorityQueue;
32 import java.util.SortedSet;
33 import java.util.TreeSet;
34 import java.util.concurrent.ConcurrentHashMap;
35 import java.util.concurrent.Executors;
36 import java.util.concurrent.ScheduledExecutorService;
37 import java.util.concurrent.TimeUnit;
38 import java.util.concurrent.atomic.AtomicLong;
39 import java.util.concurrent.locks.ReentrantLock;
40
41 import org.apache.commons.logging.Log;
42 import org.apache.commons.logging.LogFactory;
43 import org.apache.hadoop.conf.Configuration;
44 import org.apache.hadoop.fs.FileSystem;
45 import org.apache.hadoop.fs.Path;
46 import org.apache.hadoop.hbase.io.HeapSize;
47 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
48 import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
49 import org.apache.hadoop.hbase.util.Bytes;
50 import org.apache.hadoop.hbase.util.ClassSize;
51 import org.apache.hadoop.hbase.util.FSUtils;
52 import org.apache.hadoop.hbase.util.HasThread;
53 import org.apache.hadoop.hbase.util.Threads;
54 import org.apache.hadoop.util.StringUtils;
55
56 import com.google.common.util.concurrent.ThreadFactoryBuilder;
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
/**
 * A block cache implementation backed by a {@link ConcurrentHashMap} with an
 * LRU-style eviction policy.  Blocks are divided into three priorities
 * (single-access, multi-access, in-memory) and an eviction pass frees from
 * each priority bucket according to how far it exceeds its share of the
 * cache.  Eviction may run inline or on a dedicated background thread.
 *
 * Heap accounting: the tracked size includes a pre-computed fixed overhead
 * (see calculateOverhead) plus the heap size of every cached block.
 */
public class LruBlockCache implements BlockCache, HeapSize {

  static final Log LOG = LogFactory.getLog(LruBlockCache.class);

  /** Configuration keys overriding the min/acceptable watermark factors. */
  static final String LRU_MIN_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.min.factor";
  static final String LRU_ACCEPTABLE_FACTOR_CONFIG_NAME = "hbase.lru.blockcache.acceptable.factor";

  /** Defaults for the backing ConcurrentHashMap. */
  static final float DEFAULT_LOAD_FACTOR = 0.75f;
  static final int DEFAULT_CONCURRENCY_LEVEL = 16;

  /** Eviction frees down to minFactor; eviction triggers above acceptableFactor. */
  static final float DEFAULT_MIN_FACTOR = 0.75f;
  static final float DEFAULT_ACCEPTABLE_FACTOR = 0.85f;

  /** Per-priority shares of the cache; they must total 1.0. */
  static final float DEFAULT_SINGLE_FACTOR = 0.25f;
  static final float DEFAULT_MULTI_FACTOR = 0.50f;
  static final float DEFAULT_MEMORY_FACTOR = 0.25f;

  /** Statistics-logging period, in seconds (every 5 minutes). */
  static final int statThreadPeriod = 60 * 5;

  /** Concurrent map backing the cache. */
  private final ConcurrentHashMap<BlockCacheKey,CachedBlock> map;

  /** Eviction lock (fair); ensures a single eviction pass at a time. */
  private final ReentrantLock evictionLock = new ReentrantLock(true);

  /** Volatile flag set while an eviction pass is running. */
  private volatile boolean evictionInProgress = false;

  /** Optional eviction thread; null when evictions run inline. */
  private final EvictionThread evictionThread;

  /** Daemon scheduler driving periodic statistics logging. */
  private final ScheduledExecutorService scheduleThreadPool =
    Executors.newScheduledThreadPool(1,
      new ThreadFactoryBuilder()
        .setNameFormat("LRU Statistics #%d")
        .setDaemon(true)
        .build());

  /** Current size of cache, in bytes (blocks plus fixed overhead). */
  private final AtomicLong size;

  /** Current number of cached elements. */
  private final AtomicLong elements;

  /** Cache access count (sequence number used for LRU ordering). */
  private final AtomicLong count;

  /** Cache statistics (hits, misses, evictions). */
  private final CacheStats stats;

  /** Maximum allowable size of cache (block put if size > max, evict). */
  private long maxSize;

  /** Approximate block size. */
  private long blockSize;

  /** Eviction triggers when size exceeds maxSize * acceptableFactor. */
  private float acceptableFactor;

  /** Eviction frees down to maxSize * minFactor. */
  private float minFactor;

  /** Share of the cache allotted to single-access blocks. */
  private float singleFactor;

  /** Share of the cache allotted to multi-access blocks. */
  private float multiFactor;

  /** Share of the cache allotted to in-memory blocks. */
  private float memoryFactor;

  /** Fixed overhead of this cache's own structures, in bytes. */
  private long overhead;
176
177
178
179
180
181
182
183
184
185
186
  /**
   * Default constructor.  Specify maximum size and expected average block
   * size (approximation is fine); an eviction thread is started.
   *
   * @param maxSize maximum size of cache, in bytes
   * @param blockSize approximate size of each block, in bytes
   * @param conf configuration supplying the min/acceptable factor overrides
   */
  public LruBlockCache(long maxSize, long blockSize, Configuration conf) {
    this(maxSize, blockSize, true, conf);
  }

  /**
   * Constructor that allows disabling the eviction thread (evictions then
   * run inline on the caching thread).  The map is pre-sized to
   * 1.2 * maxSize / blockSize entries so it should not need to grow.
   */
  public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, Configuration conf) {
    this(maxSize, blockSize, evictionThread,
        (int)Math.ceil(1.2*maxSize/blockSize),
        DEFAULT_LOAD_FACTOR,
        DEFAULT_CONCURRENCY_LEVEL,
        conf.getFloat(LRU_MIN_FACTOR_CONFIG_NAME, DEFAULT_MIN_FACTOR),
        conf.getFloat(LRU_ACCEPTABLE_FACTOR_CONFIG_NAME, DEFAULT_ACCEPTABLE_FACTOR),
        DEFAULT_SINGLE_FACTOR,
        DEFAULT_MULTI_FACTOR,
        DEFAULT_MEMORY_FACTOR);
  }
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221 public LruBlockCache(long maxSize, long blockSize, boolean evictionThread,
222 int mapInitialSize, float mapLoadFactor, int mapConcurrencyLevel,
223 float minFactor, float acceptableFactor,
224 float singleFactor, float multiFactor, float memoryFactor) {
225 if(singleFactor + multiFactor + memoryFactor != 1) {
226 throw new IllegalArgumentException("Single, multi, and memory factors " +
227 " should total 1.0");
228 }
229 if(minFactor >= acceptableFactor) {
230 throw new IllegalArgumentException("minFactor must be smaller than acceptableFactor");
231 }
232 if(minFactor >= 1.0f || acceptableFactor >= 1.0f) {
233 throw new IllegalArgumentException("all factors must be < 1");
234 }
235 this.maxSize = maxSize;
236 this.blockSize = blockSize;
237 map = new ConcurrentHashMap<BlockCacheKey,CachedBlock>(mapInitialSize,
238 mapLoadFactor, mapConcurrencyLevel);
239 this.minFactor = minFactor;
240 this.acceptableFactor = acceptableFactor;
241 this.singleFactor = singleFactor;
242 this.multiFactor = multiFactor;
243 this.memoryFactor = memoryFactor;
244 this.stats = new CacheStats();
245 this.count = new AtomicLong(0);
246 this.elements = new AtomicLong(0);
247 this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
248 this.size = new AtomicLong(this.overhead);
249 if(evictionThread) {
250 this.evictionThread = new EvictionThread(this);
251 this.evictionThread.start();
252 } else {
253 this.evictionThread = null;
254 }
255 this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
256 statThreadPeriod, statThreadPeriod, TimeUnit.SECONDS);
257 }
258
259 public void setMaxSize(long maxSize) {
260 this.maxSize = maxSize;
261 if(this.size.get() > acceptableSize() && !evictionInProgress) {
262 runEviction();
263 }
264 }
265
266
267
268
269
270
271
272
273
274
275
276
277 @Override
278 public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
279 CachedBlock cb = map.get(cacheKey);
280 if(cb != null) {
281
282 if (compare(buf, cb.getBuffer()) != 0) {
283 throw new RuntimeException("Cached block contents differ, which should not have happened."
284 + "cacheKey:" + cacheKey);
285 }
286 String msg = "Cached an already cached block: " + cacheKey + " cb:" + cb.getCacheKey();
287 msg += ". This is harmless and can happen in rare cases (see HBASE-8547)";
288 LOG.warn(msg);
289 return;
290 }
291 cb = new CachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
292 long newSize = updateSizeMetrics(cb, false);
293 map.put(cacheKey, cb);
294 elements.incrementAndGet();
295 if(newSize > acceptableSize() && !evictionInProgress) {
296 runEviction();
297 }
298 }
299
300 private int compare(Cacheable left, Cacheable right) {
301 ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
302 left.serialize(l);
303 ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
304 right.serialize(r);
305 return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
306 r.array(), r.arrayOffset(), r.limit());
307 }
308
309
310
311
312
313
314
315
316
317
318
  /**
   * Cache the block with the specified name and buffer, using the default
   * (non-in-memory) priority.
   *
   * @param cacheKey block's cache key
   * @param buf block buffer
   */
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
    cacheBlock(cacheKey, buf, false);
  }
322
323
324
325
326
327
328
329
330
331 protected long updateSizeMetrics(CachedBlock cb, boolean evict) {
332 long heapsize = cb.heapSize();
333 if (evict) {
334 heapsize *= -1;
335 }
336 Cacheable cachedBlock = cb.getBuffer();
337 SchemaMetrics schemaMetrics = cachedBlock.getSchemaMetrics();
338 if (schemaMetrics != null) {
339 schemaMetrics.updateOnCachePutOrEvict(
340 cachedBlock.getBlockType().getCategory(), heapsize, evict);
341 }
342 return size.addAndGet(heapsize);
343 }
344
345
346
347
348
349
350
351
352
353
354 @Override
355 public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat) {
356 CachedBlock cb = map.get(cacheKey);
357 if(cb == null) {
358 if (!repeat) stats.miss(caching);
359 return null;
360 }
361 stats.hit(caching);
362 cb.access(count.incrementAndGet());
363 return cb.getBuffer();
364 }
365
366
367 @Override
368 public boolean evictBlock(BlockCacheKey cacheKey) {
369 CachedBlock cb = map.get(cacheKey);
370 if (cb == null) return false;
371 evictBlock(cb);
372 return true;
373 }
374
375
376
377
378
379
380
381
382
383
384
385 @Override
386 public int evictBlocksByHfileName(String hfileName) {
387 int numEvicted = 0;
388 for (BlockCacheKey key : map.keySet()) {
389 if (key.getHfileName().equals(hfileName)) {
390 if (evictBlock(key))
391 ++numEvicted;
392 }
393 }
394 return numEvicted;
395 }
396
397 protected long evictBlock(CachedBlock block) {
398 map.remove(block.getCacheKey());
399 updateSizeMetrics(block, true);
400 elements.decrementAndGet();
401 stats.evicted();
402 return block.heapSize();
403 }
404
405
406
407
408 private void runEviction() {
409 if(evictionThread == null) {
410 evict();
411 } else {
412 evictionThread.evict();
413 }
414 }
415
416
417
418
  /**
   * Eviction method.
   *
   * Only one pass runs at a time (guarded by evictionLock; a caller that
   * cannot acquire the lock returns immediately).  A pass scans the whole
   * map once, partitions blocks into the three priority buckets, and frees
   * down to minSize() by draining buckets in order of least overflow first.
   */
  void evict() {

    // Ensure only one eviction at a time; do not queue behind a running pass.
    if(!evictionLock.tryLock()) return;

    try {
      evictionInProgress = true;
      long currentSize = this.size.get();
      // Free down to the min watermark, not merely just below acceptable.
      long bytesToFree = currentSize - minSize();

      if (LOG.isDebugEnabled()) {
        LOG.debug("Block cache LRU eviction started; Attempting to free " +
          StringUtils.byteDesc(bytesToFree) + " of total=" +
          StringUtils.byteDesc(currentSize));
      }

      // Nothing to do; the finally block still clears the in-progress flag.
      if(bytesToFree <= 0) return;

      // One bucket per priority, each capped at its share of the min size.
      BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize,
          singleSize());
      BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize,
          multiSize());
      BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize,
          memorySize());

      // Scan entire cache and insert each block into its priority bucket.
      for(CachedBlock cachedBlock : map.values()) {
        switch(cachedBlock.getPriority()) {
          case SINGLE: {
            bucketSingle.add(cachedBlock);
            break;
          }
          case MULTI: {
            bucketMulti.add(cachedBlock);
            break;
          }
          case MEMORY: {
            bucketMemory.add(cachedBlock);
            break;
          }
        }
      }

      // Process buckets in ascending overflow order, so the per-bucket quota
      // (remaining bytes / remaining buckets) grows for the buckets that
      // overflow their share the most.
      PriorityQueue<BlockBucket> bucketQueue =
        new PriorityQueue<BlockBucket>(3);

      bucketQueue.add(bucketSingle);
      bucketQueue.add(bucketMulti);
      bucketQueue.add(bucketMemory);

      int remainingBuckets = 3;
      long bytesFreed = 0;

      BlockBucket bucket;
      while((bucket = bucketQueue.poll()) != null) {
        long overflow = bucket.overflow();
        if(overflow > 0) {
          // Never free more from a bucket than its overflow beyond its share.
          long bucketBytesToFree = Math.min(overflow,
            (bytesToFree - bytesFreed) / remainingBuckets);
          bytesFreed += bucket.free(bucketBytesToFree);
        }
        remainingBuckets--;
      }

      if (LOG.isDebugEnabled()) {
        long single = bucketSingle.totalSize();
        long multi = bucketMulti.totalSize();
        long memory = bucketMemory.totalSize();
        LOG.debug("Block cache LRU eviction completed; " +
          "freed=" + StringUtils.byteDesc(bytesFreed) + ", " +
          "total=" + StringUtils.byteDesc(this.size.get()) + ", " +
          "single=" + StringUtils.byteDesc(single) + ", " +
          "multi=" + StringUtils.byteDesc(multi) + ", " +
          "memory=" + StringUtils.byteDesc(memory));
      }
    } finally {
      stats.evict();
      evictionInProgress = false;
      evictionLock.unlock();
    }
  }
501
502
503
504
505
506
507
508 private class BlockBucket implements Comparable<BlockBucket> {
509
510 private CachedBlockQueue queue;
511 private long totalSize = 0;
512 private long bucketSize;
513
514 public BlockBucket(long bytesToFree, long blockSize, long bucketSize) {
515 this.bucketSize = bucketSize;
516 queue = new CachedBlockQueue(bytesToFree, blockSize);
517 totalSize = 0;
518 }
519
520 public void add(CachedBlock block) {
521 totalSize += block.heapSize();
522 queue.add(block);
523 }
524
525 public long free(long toFree) {
526 CachedBlock cb;
527 long freedBytes = 0;
528 while ((cb = queue.pollLast()) != null) {
529 freedBytes += evictBlock(cb);
530 if (freedBytes >= toFree) {
531 return freedBytes;
532 }
533 }
534 return freedBytes;
535 }
536
537 public long overflow() {
538 return totalSize - bucketSize;
539 }
540
541 public long totalSize() {
542 return totalSize;
543 }
544
545 public int compareTo(BlockBucket that) {
546 if(this.overflow() == that.overflow()) return 0;
547 return this.overflow() > that.overflow() ? 1 : -1;
548 }
549 }
550
551
552
553
554
  /**
   * Get the maximum size of this cache.
   *
   * @return max size in bytes
   */
  public long getMaxSize() {
    return this.maxSize;
  }

  /**
   * Get the current size of this cache (blocks plus fixed overhead).
   *
   * @return current size in bytes
   */
  public long getCurrentSize() {
    return this.size.get();
  }

  /**
   * Get the amount of space remaining before the cache is full.
   *
   * @return free space in bytes
   */
  public long getFreeSize() {
    return getMaxSize() - getCurrentSize();
  }

  /**
   * Get the number of blocks currently cached.
   */
  public long size() {
    return this.elements.get();
  }

  @Override
  public long getBlockCount() {
    return this.elements.get();
  }

  /**
   * Get the number of eviction runs that have occurred.
   */
  public long getEvictionCount() {
    return this.stats.getEvictionCount();
  }

  /**
   * Get the number of blocks that have been evicted during the lifetime
   * of this cache.
   */
  public long getEvictedCount() {
    return this.stats.getEvictedCount();
  }

  // Exposed for eviction-thread tests; may be null (see constructor flag).
  EvictionThread getEvictionThread() {
    return this.evictionThread;
  }
606
607
608
609
610
611
612
613 static class EvictionThread extends HasThread {
614 private WeakReference<LruBlockCache> cache;
615 private boolean go = true;
616
617 private boolean enteringRun = false;
618
619 public EvictionThread(LruBlockCache cache) {
620 super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
621 setDaemon(true);
622 this.cache = new WeakReference<LruBlockCache>(cache);
623 }
624
625 @Override
626 public void run() {
627 enteringRun = true;
628 while (this.go) {
629 synchronized(this) {
630 try {
631 this.wait();
632 } catch(InterruptedException e) {}
633 }
634 LruBlockCache cache = this.cache.get();
635 if(cache == null) break;
636 cache.evict();
637 }
638 }
639
640 public void evict() {
641 synchronized(this) {
642 this.notify();
643 }
644 }
645
646 void shutdown() {
647 this.go = false;
648 interrupt();
649 }
650
651
652
653
654 boolean isEnteringRun() {
655 return this.enteringRun;
656 }
657 }
658
659
660
661
  /*
   * Statistics thread.  Periodically dumps the cache statistics to the log;
   * scheduled at a fixed rate by the constructor's scheduleThreadPool.
   */
  static class StatisticsThread extends Thread {
    LruBlockCache lru;

    public StatisticsThread(LruBlockCache lru) {
      super("LruBlockCache.StatisticsThread");
      setDaemon(true);
      this.lru = lru;
    }
    @Override
    public void run() {
      lru.logStats();
    }
  }
675
676 public void logStats() {
677 if (!LOG.isDebugEnabled()) return;
678
679 long totalSize = heapSize();
680 long freeSize = maxSize - totalSize;
681 LruBlockCache.LOG.debug("Stats: " +
682 "total=" + StringUtils.byteDesc(totalSize) + ", " +
683 "free=" + StringUtils.byteDesc(freeSize) + ", " +
684 "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
685 "blocks=" + size() +", " +
686 "accesses=" + stats.getRequestCount() + ", " +
687 "hits=" + stats.getHitCount() + ", " +
688 "hitRatio=" +
689 (stats.getHitCount() == 0 ? "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " +
690 "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
691 "cachingHits=" + stats.getHitCachingCount() + ", " +
692 "cachingHitsRatio=" +
693 (stats.getHitCachingCount() == 0 ? "0" : (StringUtils.formatPercent(stats.getHitCachingRatio(), 2)+ ", ")) + ", " +
694 "evictions=" + stats.getEvictionCount() + ", " +
695 "evicted=" + stats.getEvictedCount() + ", " +
696 "evictedPerRun=" + stats.evictedPerEviction());
697 }
698
699
700
701
702
703
704
  /**
   * Get counter statistics for this cache.
   *
   * @return the running hit/miss/eviction counters
   */
  public CacheStats getStats() {
    return this.stats;
  }

  // Aligned fixed heap footprint of this object itself: 3 longs, 8 object
  // references, 5 floats, 1 boolean, plus the object header.
  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
      (3 * Bytes.SIZEOF_LONG) + (8 * ClassSize.REFERENCE) +
      (5 * Bytes.SIZEOF_FLOAT) + Bytes.SIZEOF_BOOLEAN
      + ClassSize.OBJECT);

  // HeapSize implementation: the size counter already includes the fixed
  // overhead seeded in the constructor, so no extra addition is needed here.
  public long heapSize() {
    return getCurrentSize();
  }
718
719 public static long calculateOverhead(long maxSize, long blockSize, int concurrency){
720
721 return CACHE_FIXED_OVERHEAD + ClassSize.CONCURRENT_HASHMAP +
722 ((long)Math.ceil(maxSize*1.2/blockSize)
723 * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
724 (concurrency * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
725 }
726
727 @Override
728 public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf) throws IOException {
729
730 Map<String, Path> sfMap = FSUtils.getTableStoreFilePathMap(
731 FileSystem.get(conf),
732 FSUtils.getRootDir(conf));
733
734
735
736 Map<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary> bcs =
737 new HashMap<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary>();
738
739 for (CachedBlock cb : map.values()) {
740 String sf = cb.getCacheKey().getHfileName();
741 Path path = sfMap.get(sf);
742 if ( path != null) {
743 BlockCacheColumnFamilySummary lookup =
744 BlockCacheColumnFamilySummary.createFromStoreFilePath(path);
745 BlockCacheColumnFamilySummary bcse = bcs.get(lookup);
746 if (bcse == null) {
747 bcse = BlockCacheColumnFamilySummary.create(lookup);
748 bcs.put(lookup,bcse);
749 }
750 bcse.incrementBlocks();
751 bcse.incrementHeapSize(cb.heapSize());
752 }
753 }
754 List<BlockCacheColumnFamilySummary> list =
755 new ArrayList<BlockCacheColumnFamilySummary>(bcs.values());
756 Collections.sort( list );
757 return list;
758 }
759
760
761
  // Simple calculators of sizes given factors and maxSize

  /** Watermark above which cacheBlock/setMaxSize trigger an eviction pass. */
  private long acceptableSize() {
    return (long)Math.floor(this.maxSize * this.acceptableFactor);
  }
  /** Target size an eviction pass frees the cache down to. */
  private long minSize() {
    return (long)Math.floor(this.maxSize * this.minFactor);
  }
  /** Share of the (min-sized) cache allotted to single-access blocks. */
  private long singleSize() {
    return (long)Math.floor(this.maxSize * this.singleFactor * this.minFactor);
  }
  /** Share of the (min-sized) cache allotted to multi-access blocks. */
  private long multiSize() {
    return (long)Math.floor(this.maxSize * this.multiFactor * this.minFactor);
  }
  /** Share of the (min-sized) cache allotted to in-memory blocks. */
  private long memorySize() {
    return (long)Math.floor(this.maxSize * this.memoryFactor * this.minFactor);
  }
777
778 public void shutdown() {
779 this.scheduleThreadPool.shutdown();
780 for (int i = 0; i < 10; i++) {
781 if (!this.scheduleThreadPool.isShutdown()) Threads.sleep(10);
782 }
783 if (!this.scheduleThreadPool.isShutdown()) {
784 List<Runnable> runnables = this.scheduleThreadPool.shutdownNow();
785 LOG.debug("Still running " + runnables);
786 }
787 this.evictionThread.shutdown();
788 }
789
790
  /** Clears the cache. Used in tests. */
  public void clearCache() {
    // NOTE(review): this empties the map but does not reset the size/elements
    // counters or per-schema metrics, so heapSize()/size() stay at their
    // pre-clear values afterwards — confirm that is acceptable for every
    // caller before using this outside tests.
    map.clear();
  }
794
795
796
797
798
799 SortedSet<String> getCachedFileNamesForTest() {
800 SortedSet<String> fileNames = new TreeSet<String>();
801 for (BlockCacheKey cacheKey : map.keySet()) {
802 fileNames.add(cacheKey.getHfileName());
803 }
804 return fileNames;
805 }
806
807 Map<BlockType, Integer> getBlockTypeCountsForTest() {
808 Map<BlockType, Integer> counts =
809 new EnumMap<BlockType, Integer>(BlockType.class);
810 for (CachedBlock cb : map.values()) {
811 BlockType blockType = ((HFileBlock) cb.getBuffer()).getBlockType();
812 Integer count = counts.get(blockType);
813 counts.put(blockType, (count == null ? 0 : count) + 1);
814 }
815 return counts;
816 }
817
818 public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
819 Map<DataBlockEncoding, Integer> counts =
820 new EnumMap<DataBlockEncoding, Integer>(DataBlockEncoding.class);
821 for (BlockCacheKey cacheKey : map.keySet()) {
822 DataBlockEncoding encoding = cacheKey.getDataBlockEncoding();
823 Integer count = counts.get(encoding);
824 counts.put(encoding, (count == null ? 0 : count) + 1);
825 }
826 return counts;
827 }
828
829 }