/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile.slab;

import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.BlockCache;
import org.apache.hadoop.hbase.io.hfile.BlockCacheColumnFamilySummary;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.io.hfile.Cacheable;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.util.StringUtils;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;
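/**
 * SingleSizeCache is a slab-backed cache that stores blocks of one fixed
 * size. Blocks are serialized into chunks handed out by the backing
 * {@link Slab} and tracked in a Guava-built concurrent map; a hit
 * deserializes the block from its chunk, and an eviction returns the chunk
 * to the slab. Insertions and evictions are reported to an optional
 * {@link SlabItemActionWatcher}.
 */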
@InterfaceAudience.Private
public class SingleSizeCache implements BlockCache, HeapSize {
  private final Slab backingStore;
  private final ConcurrentMap<BlockCacheKey, CacheablePair> backingMap;
  private final int numBlocks;
  private final int blockSize;
  private final CacheStats stats;
  private final SlabItemActionWatcher actionWatcher;
  private final AtomicLong size;
  private final AtomicLong timeSinceLastAccess;
  public final static long CACHE_FIXED_OVERHEAD = ClassSize
      .align((2 * Bytes.SIZEOF_INT) + (5 * ClassSize.REFERENCE)
          + ClassSize.OBJECT);

  static final Log LOG = LogFactory.getLog(SingleSizeCache.class);
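  /**
   * Creates a cache that holds a fixed number of fixed-size blocks.
   *
   * @param blockSize the size, in bytes, of each block in this cache
   * @param numBlocks the number of blocks the cache can hold
   * @param master    watcher notified of insertions and evictions (typically
   *                  the owning SlabCache); may be null
   */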
  public SingleSizeCache(int blockSize, int numBlocks,
      SlabItemActionWatcher master) {
    this.blockSize = blockSize;
    this.numBlocks = numBlocks;
    backingStore = new Slab(blockSize, numBlocks);
    this.stats = new CacheStats();
    this.actionWatcher = master;
    this.size = new AtomicLong(CACHE_FIXED_OVERHEAD + backingStore.heapSize());
    this.timeSinceLastAccess = new AtomicLong();

    // The removal listener frees an entry's slab chunk whenever the backing
    // map itself evicts it, so map-driven and explicit evictions share the
    // same cleanup path in doEviction().
    RemovalListener<BlockCacheKey, CacheablePair> listener =
        new RemovalListener<BlockCacheKey, CacheablePair>() {
          @Override
          public void onRemoval(
              RemovalNotification<BlockCacheKey, CacheablePair> notification) {
            if (!notification.wasEvicted()) {
              // Explicit removals (evictBlock) already run doEviction
              // themselves; only act on evictions decided by the map.
              return;
            }
            CacheablePair value = notification.getValue();
            timeSinceLastAccess.set(System.nanoTime()
                - value.recentlyAccessed.get());
            stats.evict();
            doEviction(notification.getKey(), value);
          }
        };

    backingMap = CacheBuilder.newBuilder()
        .maximumSize(numBlocks - 1)
        .removalListener(listener)
        .<BlockCacheKey, CacheablePair>build()
        .asMap();
  }
  @Override
  public void cacheBlock(BlockCacheKey blockName, Cacheable toBeCached) {
    ByteBuffer storedBlock;

    try {
      storedBlock = backingStore.alloc(toBeCached.getSerializedLength());
    } catch (InterruptedException e) {
      LOG.warn("SlabAllocator was interrupted while waiting for block to become available", e);
      return;
    }

    CacheablePair newEntry = new CacheablePair(toBeCached.getDeserializer(),
        storedBlock);
    toBeCached.serialize(storedBlock);

    synchronized (this) {
      CacheablePair alreadyCached = backingMap.putIfAbsent(blockName, newEntry);

      if (alreadyCached != null) {
        backingStore.free(storedBlock);
        throw new RuntimeException("already cached " + blockName);
      }
      if (actionWatcher != null) {
        actionWatcher.onInsertion(blockName, this);
      }
    }
    newEntry.recentlyAccessed.set(System.nanoTime());
    this.size.addAndGet(newEntry.heapSize());
  }
  @Override
  public Cacheable getBlock(BlockCacheKey key, boolean caching, boolean repeat,
      boolean updateCacheMetrics) {
    CacheablePair contentBlock = backingMap.get(key);
    if (contentBlock == null) {
      if (!repeat && updateCacheMetrics) stats.miss(caching);
      return null;
    }

    if (updateCacheMetrics) stats.hit(caching);

    try {
      contentBlock.recentlyAccessed.set(System.nanoTime());
      synchronized (contentBlock) {
        if (contentBlock.serializedData == null) {
          // The entry was evicted between the map lookup and this check;
          // treat it as a miss rather than deserializing freed memory.
          LOG.warn("Concurrent eviction of " + key);
          return null;
        }
        return contentBlock.deserializer
            .deserialize(contentBlock.serializedData.asReadOnlyBuffer());
      }
    } catch (Throwable t) {
      LOG.error("Deserializer threw an exception. This may indicate a bug.", t);
      return null;
    }
  }
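  /**
   * Evicts the block with the given key and frees its slab chunk.
   *
   * @param key the key of the entry to evict
   * @return true if an entry was present and evicted, false otherwise
   */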
  public boolean evictBlock(BlockCacheKey key) {
    stats.evict();
    CacheablePair evictedBlock = backingMap.remove(key);

    if (evictedBlock != null) {
      doEviction(key, evictedBlock);
    }
    return evictedBlock != null;
  }

  private void doEviction(BlockCacheKey key, CacheablePair evictedBlock) {
    long evictedHeap = 0;
    synchronized (evictedBlock) {
      if (evictedBlock.serializedData == null) {
        // Already evicted by a concurrent caller; nothing left to free.
        return;
      }
      evictedHeap = evictedBlock.heapSize();
      ByteBuffer bb = evictedBlock.serializedData;
      evictedBlock.serializedData = null;
      backingStore.free(bb);

      // Freeing the buffer and notifying the watcher happen inside the
      // synchronized block: a concurrent getBlock() on this entry either
      // deserializes the data before it is freed or observes
      // serializedData == null, never a recycled buffer.
      if (actionWatcher != null) {
        actionWatcher.onEviction(key, this);
      }
    }
    stats.evicted();
    size.addAndGet(-1 * evictedHeap);
  }
  public void logStats() {
    // timeSinceLastAccess is recorded in nanoseconds; report milliseconds.
    long milliseconds = this.timeSinceLastAccess.get() / 1000000;

    LOG.info("For Slab of size " + this.blockSize + ": "
        + this.getOccupiedSize() / this.blockSize
        + " occupied, out of a capacity of " + this.numBlocks
        + " blocks. HeapSize is "
        + StringUtils.humanReadableInt(this.heapSize()) + " bytes, "
        + "churnTime=" + StringUtils.formatTime(milliseconds));

    LOG.info("Slab Stats: " + "accesses="
        + stats.getRequestCount()
        + ", "
        + "hits="
        + stats.getHitCount()
        + ", "
        + "hitRatio="
        + (stats.getHitCount() == 0 ? "0, " : (StringUtils.formatPercent(
            stats.getHitRatio(), 2) + "%, "))
        + "cachingAccesses="
        + stats.getRequestCachingCount()
        + ", "
        + "cachingHits="
        + stats.getHitCachingCount()
        + ", "
        + "cachingHitsRatio="
        + (stats.getHitCachingCount() == 0 ? "0, " : (StringUtils.formatPercent(
            stats.getHitCachingRatio(), 2) + "%, ")) + "evictions="
        + stats.getEvictionCount() + ", " + "evicted="
        + stats.getEvictedCount() + ", " + "evictedPerRun="
        + stats.evictedPerEviction());
  }
  public void shutdown() {
    backingStore.shutdown();
  }

  public long heapSize() {
    return this.size.get() + backingStore.heapSize();
  }

  public long size() {
    return (long) this.blockSize * (long) this.numBlocks;
  }

  public long getFreeSize() {
    return (long) backingStore.getBlocksRemaining() * (long) blockSize;
  }

  public long getOccupiedSize() {
    return (long) (numBlocks - backingStore.getBlocksRemaining()) * (long) blockSize;
  }

  public long getEvictedCount() {
    return stats.getEvictedCount();
  }

  public CacheStats getStats() {
    return this.stats;
  }

  @Override
  public long getBlockCount() {
    return numBlocks - backingStore.getBlocksRemaining();
  }
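  // The in-memory priority flag is not used by this cache; delegate to the
  // two-argument cacheBlock.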
  @Override
  public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
    this.cacheBlock(cacheKey, buf);
  }
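  /**
   * Evicts every cached block belonging to the given HFile.
   *
   * @param hfileName the name of the HFile whose blocks should be evicted
   * @return the number of blocks evicted
   */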
  @Override
  public int evictBlocksByHfileName(String hfileName) {
    int evictedCount = 0;
    for (BlockCacheKey e : backingMap.keySet()) {
      if (e.getHfileName().equals(hfileName)) {
        if (this.evictBlock(e)) {
          ++evictedCount;
        }
      }
    }
    return evictedCount;
  }
  @Override
  public long getCurrentSize() {
    // Current size is not tracked by this cache; the stub always reports 0.
    return 0;
  }
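  /*
   * Per-column-family summaries are not supported for this cache.
   */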
  @Override
  public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(
      Configuration conf) {
    throw new UnsupportedOperationException();
  }
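  /**
   * Pairs a block's serialized bytes (a slice of the slab) with the
   * deserializer needed to reconstruct it, plus its last-access time.
   */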
  private static class CacheablePair implements HeapSize {
    final CacheableDeserializer<Cacheable> deserializer;
    ByteBuffer serializedData;
    AtomicLong recentlyAccessed;

    private CacheablePair(CacheableDeserializer<Cacheable> deserializer,
        ByteBuffer serializedData) {
      this.recentlyAccessed = new AtomicLong();
      this.deserializer = deserializer;
      this.serializedData = serializedData;
    }
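    /*
     * Accounts only for the pair object itself (object header, three
     * references, and the AtomicLong); the block data lives in the slab and
     * is accounted for separately.
     */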
    @Override
    public long heapSize() {
      return ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE * 3
          + ClassSize.ATOMIC_LONG);
    }
  }
}