1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20 package org.apache.hadoop.hbase.io.hfile.slab;
21
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.util.StringUtils;

import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
48
49
50
51
52
53
54
55 @InterfaceAudience.Private
56 public class SlabCache implements SlabItemActionWatcher, BlockCache, HeapSize {
57
  // Index of every cached block: maps a block's key to the single-size slab
  // that currently holds it. Maintained via onInsertion()/onEviction()
  // callbacks from the child caches.
  private final ConcurrentHashMap<BlockCacheKey, SingleSizeCache> backingStore;
  // Child slabs keyed by their block size; a TreeMap so getHigherBlock() can
  // find the smallest slab whose block size fits a given item.
  private final TreeMap<Integer, SingleSizeCache> sizer;
  static final Log LOG = LogFactory.getLog(SlabCache.class);
  // How often the statistics thread logs stats (5 minutes).
  static final int STAT_THREAD_PERIOD_SECS = 60 * 5;

  // Single daemon thread used to periodically dump statistics to the log.
  private final ScheduledExecutorService scheduleThreadPool = Executors.newScheduledThreadPool(1,
      new ThreadFactoryBuilder().setDaemon(true).setNameFormat("Slab Statistics #%d").build());

  // Total capacity of this cache in bytes, as passed to the constructor.
  long size;
  private final CacheStats stats;
  // Histogram of the sizes of all blocks offered to the cache.
  final SlabStats requestStats;
  // Histogram of the sizes of blocks that actually fit in a slab and were cached.
  final SlabStats successfullyCachedStats;
  // Expected average block size; used to derive default slab sizes in addSlabByConf().
  private final long avgBlockSize;
  private static final long CACHE_FIXED_OVERHEAD = ClassSize.estimateBase(
      SlabCache.class, false);
73
74
75
76
77
78
79
80
81 public SlabCache(long size, long avgBlockSize) {
82 this.avgBlockSize = avgBlockSize;
83 this.size = size;
84 this.stats = new CacheStats();
85 this.requestStats = new SlabStats();
86 this.successfullyCachedStats = new SlabStats();
87
88 backingStore = new ConcurrentHashMap<BlockCacheKey, SingleSizeCache>();
89 sizer = new TreeMap<Integer, SingleSizeCache>();
90 this.scheduleThreadPool.scheduleAtFixedRate(new StatisticsThread(this),
91 STAT_THREAD_PERIOD_SECS, STAT_THREAD_PERIOD_SECS, TimeUnit.SECONDS);
92
93 }
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109 public void addSlabByConf(Configuration conf) {
110
111 String[] porportions = conf.getStrings(
112 "hbase.offheapcache.slab.proportions", "0.80", "0.20");
113 String[] sizes = conf.getStrings("hbase.offheapcache.slab.sizes",
114 Long.valueOf(avgBlockSize * 11 / 10).toString(),
115 Long.valueOf(avgBlockSize * 21 / 10).toString());
116
117 if (porportions.length != sizes.length) {
118 throw new IllegalArgumentException(
119 "SlabCache conf not "
120 + "initialized, error in configuration. hbase.offheap.slab.proportions specifies "
121 + porportions.length
122 + " slabs while hbase.offheap.slab.sizes specifies "
123 + sizes.length + " slabs "
124 + "offheapslabporportions and offheapslabsizes");
125 }
126
127
128
129
130 BigDecimal[] parsedProportions = stringArrayToBigDecimalArray(porportions);
131 BigDecimal[] parsedSizes = stringArrayToBigDecimalArray(sizes);
132
133 BigDecimal sumProportions = new BigDecimal(0);
134 for (BigDecimal b : parsedProportions) {
135
136 Preconditions
137 .checkArgument(b.compareTo(BigDecimal.ZERO) == 1,
138 "Proportions in hbase.offheap.slab.proportions must be greater than 0!");
139 sumProportions = sumProportions.add(b);
140 }
141
142
143 Preconditions
144 .checkArgument(sumProportions.compareTo(BigDecimal.ONE) != 1,
145 "Sum of all proportions in hbase.offheap.slab.proportions must be less than 1");
146
147
148 if (sumProportions.compareTo(new BigDecimal("0.99")) == -1) {
149 LOG.warn("Sum of hbase.offheap.slab.proportions is less than 0.99! Memory is being wasted");
150 }
151 for (int i = 0; i < parsedProportions.length; i++) {
152 int blockSize = parsedSizes[i].intValue();
153 int numBlocks = new BigDecimal(this.size).multiply(parsedProportions[i])
154 .divide(parsedSizes[i], BigDecimal.ROUND_DOWN).intValue();
155 addSlab(blockSize, numBlocks);
156 }
157 }
158
159
160
161
162
163
164
165
166
167
168 Entry<Integer, SingleSizeCache> getHigherBlock(int size) {
169 return sizer.higherEntry(size - 1);
170 }
171
172 private BigDecimal[] stringArrayToBigDecimalArray(String[] parsee) {
173 BigDecimal[] parsed = new BigDecimal[parsee.length];
174 for (int i = 0; i < parsee.length; i++) {
175 parsed[i] = new BigDecimal(parsee[i].trim());
176 }
177 return parsed;
178 }
179
180 private void addSlab(int blockSize, int numBlocks) {
181 LOG.info("Creating a slab of blockSize " + blockSize + " with " + numBlocks
182 + " blocks, " + StringUtils.humanReadableInt(blockSize * (long) numBlocks) + "bytes.");
183 sizer.put(blockSize, new SingleSizeCache(blockSize, numBlocks, this));
184 }
185
186
187
188
189
190
191
192
193
194
195
196
197
198 public void cacheBlock(BlockCacheKey cacheKey, Cacheable cachedItem) {
199 Entry<Integer, SingleSizeCache> scacheEntry = getHigherBlock(cachedItem
200 .getSerializedLength());
201
202 this.requestStats.addin(cachedItem.getSerializedLength());
203
204 if (scacheEntry == null) {
205 return;
206 }
207
208 this.successfullyCachedStats.addin(cachedItem.getSerializedLength());
209 SingleSizeCache scache = scacheEntry.getValue();
210
211
212
213
214
215 scache.cacheBlock(cacheKey, cachedItem);
216 }
217
218
219
220
221
  /**
   * Delegates to {@link #cacheBlock(BlockCacheKey, Cacheable)}; this cache
   * does not distinguish in-memory blocks.
   *
   * @param inMemory ignored by this implementation
   */
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
    cacheBlock(cacheKey, buf);
  }

  /** Returns the running hit/miss/eviction statistics for this cache. */
  public CacheStats getStats() {
    return this.stats;
  }
229
230
231
232
233
234
235 public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat) {
236 SingleSizeCache cachedBlock = backingStore.get(key);
237 if (cachedBlock == null) {
238 if (!repeat) stats.miss(caching);
239 return null;
240 }
241
242 Cacheable contentBlock = cachedBlock.getBlock(key, caching, false);
243
244 if (contentBlock != null) {
245 stats.hit(caching);
246 } else if (!repeat) {
247 stats.miss(caching);
248 }
249 return contentBlock;
250 }
251
252
253
254
255
256 public boolean evictBlock(BlockCacheKey cacheKey) {
257 SingleSizeCache cacheEntry = backingStore.get(cacheKey);
258 if (cacheEntry == null) {
259 return false;
260 } else {
261 cacheEntry.evictBlock(cacheKey);
262 return true;
263 }
264 }
265
  /**
   * Callback from a child slab when it evicts a block: record the eviction
   * and drop the key from our index.
   */
  @Override
  public void onEviction(BlockCacheKey key, SingleSizeCache notifier) {
    stats.evicted();
    backingStore.remove(key);
  }

  /**
   * Callback from a child slab when it caches a block: index the key so
   * getBlock()/evictBlock() can find the owning slab.
   */
  @Override
  public void onInsertion(BlockCacheKey key, SingleSizeCache notifier) {
    backingStore.put(key, notifier);
  }
276
277
278
279
280
281
282 public void shutdown() {
283 for (SingleSizeCache s : sizer.values()) {
284 s.shutdown();
285 }
286 this.scheduleThreadPool.shutdown();
287 }
288
289 public long heapSize() {
290 long childCacheSize = 0;
291 for (SingleSizeCache s : sizer.values()) {
292 childCacheSize += s.heapSize();
293 }
294 return SlabCache.CACHE_FIXED_OVERHEAD + childCacheSize;
295 }
296
297 public long size() {
298 return this.size;
299 }
300
301 public long getFreeSize() {
302 long childFreeSize = 0;
303 for (SingleSizeCache s : sizer.values()) {
304 childFreeSize += s.getFreeSize();
305 }
306 return childFreeSize;
307 }
308
309 @Override
310 public long getBlockCount() {
311 long count = 0;
312 for (SingleSizeCache cache : sizer.values()) {
313 count += cache.getBlockCount();
314 }
315 return count;
316 }
317
318 public long getCurrentSize() {
319 return size;
320 }
321
322 public long getEvictedCount() {
323 return stats.getEvictedCount();
324 }
325
326
327
328
329 static class StatisticsThread extends HasThread {
330 SlabCache ourcache;
331
332 public StatisticsThread(SlabCache slabCache) {
333 super("SlabCache.StatisticsThread");
334 setDaemon(true);
335 this.ourcache = slabCache;
336 }
337
338 @Override
339 public void run() {
340 for (SingleSizeCache s : ourcache.sizer.values()) {
341 s.logStats();
342 }
343
344 SlabCache.LOG.info("Current heap size is: "
345 + StringUtils.humanReadableInt(ourcache.heapSize()));
346
347 LOG.info("Request Stats");
348 ourcache.requestStats.logStats();
349 LOG.info("Successfully Cached Stats");
350 ourcache.successfullyCachedStats.logStats();
351 }
352
353 }
354
355
356
357
358
359
360 static class SlabStats {
361
362
363
364 static final int MULTIPLIER = 10;
365 final int NUMDIVISIONS = (int) (Math.log(Integer.MAX_VALUE) * MULTIPLIER);
366 private final AtomicLong[] counts = new AtomicLong[NUMDIVISIONS];
367
368 public SlabStats() {
369 for (int i = 0; i < NUMDIVISIONS; i++) {
370 counts[i] = new AtomicLong();
371 }
372 }
373
374 public void addin(int size) {
375 int index = (int) (Math.log(size) * MULTIPLIER);
376 counts[index].incrementAndGet();
377 }
378
379 public AtomicLong[] getUsage() {
380 return counts;
381 }
382
383 double getUpperBound(int index) {
384 return Math.pow(Math.E, ((index + 0.5) / MULTIPLIER));
385 }
386
387 double getLowerBound(int index) {
388 return Math.pow(Math.E, ((index - 0.5) / MULTIPLIER));
389 }
390
391 public void logStats() {
392 AtomicLong[] fineGrainedStats = getUsage();
393 for (int i = 0; i < fineGrainedStats.length; i++) {
394
395 if (fineGrainedStats[i].get() > 0) {
396 SlabCache.LOG.info("From "
397 + StringUtils.humanReadableInt((long) getLowerBound(i)) + "- "
398 + StringUtils.humanReadableInt((long) getUpperBound(i)) + ": "
399 + StringUtils.humanReadableInt(fineGrainedStats[i].get())
400 + " requests");
401
402 }
403 }
404 }
405 }
406
407 public int evictBlocksByHfileName(String hfileName) {
408 int numEvicted = 0;
409 for (BlockCacheKey key : backingStore.keySet()) {
410 if (key.getHfileName().equals(hfileName)) {
411 if (evictBlock(key))
412 ++numEvicted;
413 }
414 }
415 return numEvicted;
416 }
417
418
419
420
421
  /**
   * Per-column-family summaries are not supported by SlabCache.
   *
   * @throws UnsupportedOperationException always
   */
  @Override
  public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(
      Configuration conf) {
    throw new UnsupportedOperationException();
  }
427
428 }