View Javadoc

1   /**
2    *
3    * Licensed to the Apache Software Foundation (ASF) under one
4    * or more contributor license agreements.  See the NOTICE file
5    * distributed with this work for additional information
6    * regarding copyright ownership.  The ASF licenses this file
7    * to you under the Apache License, Version 2.0 (the
8    * "License"); you may not use this file except in compliance
9    * with the License.  You may obtain a copy of the License at
10   *
11   *     http://www.apache.org/licenses/LICENSE-2.0
12   *
13   * Unless required by applicable law or agreed to in writing, software
14   * distributed under the License is distributed on an "AS IS" BASIS,
15   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16   * See the License for the specific language governing permissions and
17   * limitations under the License.
18   */
19  package org.apache.hadoop.hbase.io.hfile;
20  
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.nio.ByteBuffer;
import java.util.Random;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache.EvictionThread;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.Test;
import org.junit.experimental.categories.Category;
33  
34  /**
35   * Tests the concurrent LruBlockCache.<p>
36   *
37   * Tests will ensure it grows and shrinks in size properly,
38   * evictions run when they're supposed to and do what they should,
39   * and that cached blocks are accessible when expected to be.
40   */
41  @Category(SmallTests.class)
42  public class TestLruBlockCache {
43  
44  
45    @Test
46    public void testBackgroundEvictionThread() throws Exception {
47      long maxSize = 100000;
48      int numBlocks = 9;
49      long blockSize = calculateBlockSizeDefault(maxSize, numBlocks);
50      assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
51  
52      LruBlockCache cache = new LruBlockCache(maxSize,blockSize);
53      EvictionThread evictionThread = cache.getEvictionThread();
54      assertTrue(evictionThread != null);
55  
56      CachedItem[] blocks = generateFixedBlocks(numBlocks + 1, blockSize, "block");
57  
58      // Make sure eviction thread has entered run method
59      while (!evictionThread.isEnteringRun()) {
60        Thread.sleep(1);
61      }
62  
63      // Add all the blocks
64      for (CachedItem block : blocks) {
65        cache.cacheBlock(block.cacheKey, block);
66      }
67  
68      // wait until at least one eviction has run
69      int n = 0;
70      while(cache.getEvictionCount() == 0) {
71        Thread.sleep(200);
72        assertTrue("Eviction never happened.", n++ < 20);
73      }
74  
75      // let cache stabilize
76      // On some systems, the cache will run multiple evictions before it attains
77      // steady-state. For instance, after populating the cache with 10 blocks,
78      // the first eviction evicts a single block and then a second eviction
79      // evicts another. I think this is due to the delta between minSize and
80      // acceptableSize, combined with variance between object overhead on
81      // different environments.
82      n = 0;
83      for (long prevCnt = 0 /* < number of blocks added */,
84                curCnt = cache.getBlockCount();
85          prevCnt != curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) {
86        Thread.sleep(200);
87        assertTrue("Cache never stabilized.", n++ < 20);
88      }
89  
90      long evictionCount = cache.getEvictionCount();
91      assertTrue(evictionCount >= 1);
92      System.out.println("Background Evictions run: " + evictionCount);
93    }
94  
95    @Test
96    public void testCacheSimple() throws Exception {
97  
98      long maxSize = 1000000;
99      long blockSize = calculateBlockSizeDefault(maxSize, 101);
100 
101     LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
102 
103     CachedItem [] blocks = generateRandomBlocks(100, blockSize);
104 
105     long expectedCacheSize = cache.heapSize();
106 
107     // Confirm empty
108     for (CachedItem block : blocks) {
109       assertTrue(cache.getBlock(block.cacheKey, true, false) == null);
110     }
111 
112     // Add blocks
113     for (CachedItem block : blocks) {
114       cache.cacheBlock(block.cacheKey, block);
115       expectedCacheSize += block.cacheBlockHeapSize();
116     }
117 
118     // Verify correctly calculated cache heap size
119     assertEquals(expectedCacheSize, cache.heapSize());
120 
121     // Check if all blocks are properly cached and retrieved
122     for (CachedItem block : blocks) {
123       HeapSize buf = cache.getBlock(block.cacheKey, true, false);
124       assertTrue(buf != null);
125       assertEquals(buf.heapSize(), block.heapSize());
126     }
127 
128     // Verify correctly calculated cache heap size
129     assertEquals(expectedCacheSize, cache.heapSize());
130 
131     // Check if all blocks are properly cached and retrieved
132     for (CachedItem block : blocks) {
133       HeapSize buf = cache.getBlock(block.cacheKey, true, false);
134       assertTrue(buf != null);
135       assertEquals(buf.heapSize(), block.heapSize());
136     }
137 
138     // Expect no evictions
139     assertEquals(0, cache.getEvictionCount());
140     Thread t = new LruBlockCache.StatisticsThread(cache);
141     t.start();
142     t.join();
143   }
144 
145   @Test
146   public void testCacheEvictionSimple() throws Exception {
147 
148     long maxSize = 100000;
149     long blockSize = calculateBlockSizeDefault(maxSize, 10);
150 
151     LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
152 
153     CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block");
154 
155     long expectedCacheSize = cache.heapSize();
156 
157     // Add all the blocks
158     for (CachedItem block : blocks) {
159       cache.cacheBlock(block.cacheKey, block);
160       expectedCacheSize += block.cacheBlockHeapSize();
161     }
162 
163     // A single eviction run should have occurred
164     assertEquals(1, cache.getEvictionCount());
165 
166     // Our expected size overruns acceptable limit
167     assertTrue(expectedCacheSize >
168       (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
169 
170     // But the cache did not grow beyond max
171     assertTrue(cache.heapSize() < maxSize);
172 
173     // And is still below the acceptable limit
174     assertTrue(cache.heapSize() <
175         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
176 
177     // All blocks except block 0  should be in the cache
178     assertTrue(cache.getBlock(blocks[0].cacheKey, true, false) == null);
179     for(int i=1;i<blocks.length;i++) {
180       assertEquals(cache.getBlock(blocks[i].cacheKey, true, false),
181           blocks[i]);
182     }
183   }
184 
185   @Test
186   public void testCacheEvictionTwoPriorities() throws Exception {
187 
188     long maxSize = 100000;
189     long blockSize = calculateBlockSizeDefault(maxSize, 10);
190 
191     LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
192 
193     CachedItem [] singleBlocks = generateFixedBlocks(5, 10000, "single");
194     CachedItem [] multiBlocks = generateFixedBlocks(5, 10000, "multi");
195 
196     long expectedCacheSize = cache.heapSize();
197 
198     // Add and get the multi blocks
199     for (CachedItem block : multiBlocks) {
200       cache.cacheBlock(block.cacheKey, block);
201       expectedCacheSize += block.cacheBlockHeapSize();
202       assertEquals(cache.getBlock(block.cacheKey, true, false), block);
203     }
204 
205     // Add the single blocks (no get)
206     for (CachedItem block : singleBlocks) {
207       cache.cacheBlock(block.cacheKey, block);
208       expectedCacheSize += block.heapSize();
209     }
210 
211     // A single eviction run should have occurred
212     assertEquals(cache.getEvictionCount(), 1);
213 
214     // We expect two entries evicted
215     assertEquals(cache.getEvictedCount(), 2);
216 
217     // Our expected size overruns acceptable limit
218     assertTrue(expectedCacheSize >
219       (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
220 
221     // But the cache did not grow beyond max
222     assertTrue(cache.heapSize() <= maxSize);
223 
224     // And is now below the acceptable limit
225     assertTrue(cache.heapSize() <=
226         (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
227 
228     // We expect fairness across the two priorities.
229     // This test makes multi go barely over its limit, in-memory
230     // empty, and the rest in single.  Two single evictions and
231     // one multi eviction expected.
232     assertTrue(cache.getBlock(singleBlocks[0].cacheKey, true, false) == null);
233     assertTrue(cache.getBlock(multiBlocks[0].cacheKey, true, false) == null);
234 
235     // And all others to be cached
236     for(int i=1;i<4;i++) {
237       assertEquals(cache.getBlock(singleBlocks[i].cacheKey, true, false),
238           singleBlocks[i]);
239       assertEquals(cache.getBlock(multiBlocks[i].cacheKey, true, false),
240           multiBlocks[i]);
241     }
242   }
243 
244   @Test
245   public void testCacheEvictionThreePriorities() throws Exception {
246 
247     long maxSize = 100000;
248     long blockSize = calculateBlockSize(maxSize, 10);
249 
250     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
251         (int)Math.ceil(1.2*maxSize/blockSize),
252         LruBlockCache.DEFAULT_LOAD_FACTOR,
253         LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
254         0.98f, // min
255         0.99f, // acceptable
256         0.33f, // single
257         0.33f, // multi
258         0.34f);// memory
259 
260 
261     CachedItem [] singleBlocks = generateFixedBlocks(5, blockSize, "single");
262     CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
263     CachedItem [] memoryBlocks = generateFixedBlocks(5, blockSize, "memory");
264 
265     long expectedCacheSize = cache.heapSize();
266 
267     // Add 3 blocks from each priority
268     for(int i=0;i<3;i++) {
269 
270       // Just add single blocks
271       cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
272       expectedCacheSize += singleBlocks[i].cacheBlockHeapSize();
273 
274       // Add and get multi blocks
275       cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
276       expectedCacheSize += multiBlocks[i].cacheBlockHeapSize();
277       cache.getBlock(multiBlocks[i].cacheKey, true, false);
278 
279       // Add memory blocks as such
280       cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
281       expectedCacheSize += memoryBlocks[i].cacheBlockHeapSize();
282 
283     }
284 
285     // Do not expect any evictions yet
286     assertEquals(0, cache.getEvictionCount());
287 
288     // Verify cache size
289     assertEquals(expectedCacheSize, cache.heapSize());
290 
291     // Insert a single block, oldest single should be evicted
292     cache.cacheBlock(singleBlocks[3].cacheKey, singleBlocks[3]);
293 
294     // Single eviction, one thing evicted
295     assertEquals(1, cache.getEvictionCount());
296     assertEquals(1, cache.getEvictedCount());
297 
298     // Verify oldest single block is the one evicted
299     assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));
300 
301     // Change the oldest remaining single block to a multi
302     cache.getBlock(singleBlocks[1].cacheKey, true, false);
303 
304     // Insert another single block
305     cache.cacheBlock(singleBlocks[4].cacheKey, singleBlocks[4]);
306 
307     // Two evictions, two evicted.
308     assertEquals(2, cache.getEvictionCount());
309     assertEquals(2, cache.getEvictedCount());
310 
311     // Oldest multi block should be evicted now
312     assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));
313 
314     // Insert another memory block
315     cache.cacheBlock(memoryBlocks[3].cacheKey, memoryBlocks[3], true);
316 
317     // Three evictions, three evicted.
318     assertEquals(3, cache.getEvictionCount());
319     assertEquals(3, cache.getEvictedCount());
320 
321     // Oldest memory block should be evicted now
322     assertEquals(null, cache.getBlock(memoryBlocks[0].cacheKey, true, false));
323 
324     // Add a block that is twice as big (should force two evictions)
325     CachedItem [] bigBlocks = generateFixedBlocks(3, blockSize*3, "big");
326     cache.cacheBlock(bigBlocks[0].cacheKey, bigBlocks[0]);
327 
328     // Four evictions, six evicted (inserted block 3X size, expect +3 evicted)
329     assertEquals(4, cache.getEvictionCount());
330     assertEquals(6, cache.getEvictedCount());
331 
332     // Expect three remaining singles to be evicted
333     assertEquals(null, cache.getBlock(singleBlocks[2].cacheKey, true, false));
334     assertEquals(null, cache.getBlock(singleBlocks[3].cacheKey, true, false));
335     assertEquals(null, cache.getBlock(singleBlocks[4].cacheKey, true, false));
336 
337     // Make the big block a multi block
338     cache.getBlock(bigBlocks[0].cacheKey, true, false);
339 
340     // Cache another single big block
341     cache.cacheBlock(bigBlocks[1].cacheKey, bigBlocks[1]);
342 
343     // Five evictions, nine evicted (3 new)
344     assertEquals(5, cache.getEvictionCount());
345     assertEquals(9, cache.getEvictedCount());
346 
347     // Expect three remaining multis to be evicted
348     assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
349     assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));
350     assertEquals(null, cache.getBlock(multiBlocks[2].cacheKey, true, false));
351 
352     // Cache a big memory block
353     cache.cacheBlock(bigBlocks[2].cacheKey, bigBlocks[2], true);
354 
355     // Six evictions, twelve evicted (3 new)
356     assertEquals(6, cache.getEvictionCount());
357     assertEquals(12, cache.getEvictedCount());
358 
359     // Expect three remaining in-memory to be evicted
360     assertEquals(null, cache.getBlock(memoryBlocks[1].cacheKey, true, false));
361     assertEquals(null, cache.getBlock(memoryBlocks[2].cacheKey, true, false));
362     assertEquals(null, cache.getBlock(memoryBlocks[3].cacheKey, true, false));
363 
364 
365   }
366 
367   // test scan resistance
368   @Test
369   public void testScanResistance() throws Exception {
370 
371     long maxSize = 100000;
372     long blockSize = calculateBlockSize(maxSize, 10);
373 
374     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
375         (int)Math.ceil(1.2*maxSize/blockSize),
376         LruBlockCache.DEFAULT_LOAD_FACTOR,
377         LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
378         0.66f, // min
379         0.99f, // acceptable
380         0.33f, // single
381         0.33f, // multi
382         0.34f);// memory
383 
384     CachedItem [] singleBlocks = generateFixedBlocks(20, blockSize, "single");
385     CachedItem [] multiBlocks = generateFixedBlocks(5, blockSize, "multi");
386 
387     // Add 5 multi blocks
388     for (CachedItem block : multiBlocks) {
389       cache.cacheBlock(block.cacheKey, block);
390       cache.getBlock(block.cacheKey, true, false);
391     }
392 
393     // Add 5 single blocks
394     for(int i=0;i<5;i++) {
395       cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
396     }
397 
398     // An eviction ran
399     assertEquals(1, cache.getEvictionCount());
400 
401     // To drop down to 2/3 capacity, we'll need to evict 4 blocks
402     assertEquals(4, cache.getEvictedCount());
403 
404     // Should have been taken off equally from single and multi
405     assertEquals(null, cache.getBlock(singleBlocks[0].cacheKey, true, false));
406     assertEquals(null, cache.getBlock(singleBlocks[1].cacheKey, true, false));
407     assertEquals(null, cache.getBlock(multiBlocks[0].cacheKey, true, false));
408     assertEquals(null, cache.getBlock(multiBlocks[1].cacheKey, true, false));
409 
410     // Let's keep "scanning" by adding single blocks.  From here on we only
411     // expect evictions from the single bucket.
412 
413     // Every time we reach 10 total blocks (every 4 inserts) we get 4 single
414     // blocks evicted.  Inserting 13 blocks should yield 3 more evictions and
415     // 12 more evicted.
416 
417     for(int i=5;i<18;i++) {
418       cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
419     }
420 
421     // 4 total evictions, 16 total evicted
422     assertEquals(4, cache.getEvictionCount());
423     assertEquals(16, cache.getEvictedCount());
424 
425     // Should now have 7 total blocks
426     assertEquals(7, cache.size());
427 
428   }
429 
430   // test setMaxSize
431   @Test
432   public void testResizeBlockCache() throws Exception {
433 
434     long maxSize = 300000;
435     long blockSize = calculateBlockSize(maxSize, 31);
436 
437     LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
438         (int)Math.ceil(1.2*maxSize/blockSize),
439         LruBlockCache.DEFAULT_LOAD_FACTOR,
440         LruBlockCache.DEFAULT_CONCURRENCY_LEVEL,
441         0.98f, // min
442         0.99f, // acceptable
443         0.33f, // single
444         0.33f, // multi
445         0.34f);// memory
446 
447     CachedItem [] singleBlocks = generateFixedBlocks(10, blockSize, "single");
448     CachedItem [] multiBlocks = generateFixedBlocks(10, blockSize, "multi");
449     CachedItem [] memoryBlocks = generateFixedBlocks(10, blockSize, "memory");
450 
451     // Add all blocks from all priorities
452     for(int i=0;i<10;i++) {
453 
454       // Just add single blocks
455       cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]);
456 
457       // Add and get multi blocks
458       cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]);
459       cache.getBlock(multiBlocks[i].cacheKey, true, false);
460 
461       // Add memory blocks as such
462       cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true);
463     }
464 
465     // Do not expect any evictions yet
466     assertEquals(0, cache.getEvictionCount());
467 
468     // Resize to half capacity plus an extra block (otherwise we evict an extra)
469     cache.setMaxSize((long)(maxSize * 0.5f));
470 
471     // Should have run a single eviction
472     assertEquals(1, cache.getEvictionCount());
473 
474     // And we expect 1/2 of the blocks to be evicted
475     assertEquals(15, cache.getEvictedCount());
476 
477     // And the oldest 5 blocks from each category should be gone
478     for(int i=0;i<5;i++) {
479       assertEquals(null, cache.getBlock(singleBlocks[i].cacheKey, true, false));
480       assertEquals(null, cache.getBlock(multiBlocks[i].cacheKey, true, false));
481       assertEquals(null, cache.getBlock(memoryBlocks[i].cacheKey, true, false));
482     }
483 
484     // And the newest 5 blocks should still be accessible
485     for(int i=5;i<10;i++) {
486       assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].cacheKey, true, false));
487       assertEquals(multiBlocks[i], cache.getBlock(multiBlocks[i].cacheKey, true, false));
488       assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].cacheKey, true, false));
489     }
490   }
491 
492   // test metricsPastNPeriods
493   @Test
494   public void testPastNPeriodsMetrics() throws Exception {
495    double delta = 0.01;
496 
497     // 3 total periods
498     CacheStats stats = new CacheStats(3);
499 
500     // No accesses, should be 0
501     stats.rollMetricsPeriod();
502     assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
503     assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
504 
505     // period 1, 1 hit caching, 1 hit non-caching, 2 miss non-caching
506     // should be (2/4)=0.5 and (1/1)=1
507     stats.hit(false);
508     stats.hit(true);
509     stats.miss(false);
510     stats.miss(false);
511     stats.rollMetricsPeriod();
512     assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
513     assertEquals(1.0, stats.getHitCachingRatioPastNPeriods(), delta);
514 
515     // period 2, 1 miss caching, 3 miss non-caching
516     // should be (2/8)=0.25 and (1/2)=0.5
517     stats.miss(true);
518     stats.miss(false);
519     stats.miss(false);
520     stats.miss(false);
521     stats.rollMetricsPeriod();
522     assertEquals(0.25, stats.getHitRatioPastNPeriods(), delta);
523     assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
524 
525     // period 3, 2 hits of each type
526     // should be (6/12)=0.5 and (3/4)=0.75
527     stats.hit(false);
528     stats.hit(true);
529     stats.hit(false);
530     stats.hit(true);
531     stats.rollMetricsPeriod();
532     assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
533     assertEquals(0.75, stats.getHitCachingRatioPastNPeriods(), delta);
534 
535     // period 4, evict period 1, two caching misses
536     // should be (4/10)=0.4 and (2/5)=0.4
537     stats.miss(true);
538     stats.miss(true);
539     stats.rollMetricsPeriod();
540     assertEquals(0.4, stats.getHitRatioPastNPeriods(), delta);
541     assertEquals(0.4, stats.getHitCachingRatioPastNPeriods(), delta);
542 
543     // period 5, evict period 2, 2 caching misses, 2 non-caching hit
544     // should be (6/10)=0.6 and (2/6)=1/3
545     stats.miss(true);
546     stats.miss(true);
547     stats.hit(false);
548     stats.hit(false);
549     stats.rollMetricsPeriod();
550     assertEquals(0.6, stats.getHitRatioPastNPeriods(), delta);
551     assertEquals((double)1/3, stats.getHitCachingRatioPastNPeriods(), delta);
552 
553     // period 6, evict period 3
554     // should be (2/6)=1/3 and (0/4)=0
555     stats.rollMetricsPeriod();
556     assertEquals((double)1/3, stats.getHitRatioPastNPeriods(), delta);
557     assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
558 
559     // period 7, evict period 4
560     // should be (2/4)=0.5 and (0/2)=0
561     stats.rollMetricsPeriod();
562     assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
563     assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
564 
565     // period 8, evict period 5
566     // should be 0 and 0
567     stats.rollMetricsPeriod();
568     assertEquals(0.0, stats.getHitRatioPastNPeriods(), delta);
569     assertEquals(0.0, stats.getHitCachingRatioPastNPeriods(), delta);
570 
571     // period 9, one of each
572     // should be (2/4)=0.5 and (1/2)=0.5
573     stats.miss(true);
574     stats.miss(false);
575     stats.hit(true);
576     stats.hit(false);
577     stats.rollMetricsPeriod();
578     assertEquals(0.5, stats.getHitRatioPastNPeriods(), delta);
579     assertEquals(0.5, stats.getHitCachingRatioPastNPeriods(), delta);
580   }
581 
582   private CachedItem [] generateFixedBlocks(int numBlocks, int size, String pfx) {
583     CachedItem [] blocks = new CachedItem[numBlocks];
584     for(int i=0;i<numBlocks;i++) {
585       blocks[i] = new CachedItem(pfx + i, size);
586     }
587     return blocks;
588   }
589 
590   private CachedItem [] generateFixedBlocks(int numBlocks, long size, String pfx) {
591     return generateFixedBlocks(numBlocks, (int)size, pfx);
592   }
593 
594   private CachedItem [] generateRandomBlocks(int numBlocks, long maxSize) {
595     CachedItem [] blocks = new CachedItem[numBlocks];
596     Random r = new Random();
597     for(int i=0;i<numBlocks;i++) {
598       blocks[i] = new CachedItem("block" + i, r.nextInt((int)maxSize)+1);
599     }
600     return blocks;
601   }
602 
603   private long calculateBlockSize(long maxSize, int numBlocks) {
604     long roughBlockSize = maxSize / numBlocks;
605     int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
606     long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
607         ClassSize.CONCURRENT_HASHMAP +
608         (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
609         (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
610     long negateBlockSize = (long)(totalOverhead/numEntries);
611     negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
612     return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*0.99f));
613   }
614 
615   private long calculateBlockSizeDefault(long maxSize, int numBlocks) {
616     long roughBlockSize = maxSize / numBlocks;
617     int numEntries = (int)Math.ceil((1.2)*maxSize/roughBlockSize);
618     long totalOverhead = LruBlockCache.CACHE_FIXED_OVERHEAD +
619         ClassSize.CONCURRENT_HASHMAP +
620         (numEntries * ClassSize.CONCURRENT_HASHMAP_ENTRY) +
621         (LruBlockCache.DEFAULT_CONCURRENCY_LEVEL * ClassSize.CONCURRENT_HASHMAP_SEGMENT);
622     long negateBlockSize = totalOverhead / numEntries;
623     negateBlockSize += CachedBlock.PER_BLOCK_OVERHEAD;
624     return ClassSize.align((long)Math.floor((roughBlockSize - negateBlockSize)*
625         LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR));
626   }
627 
628   private static class CachedItem implements Cacheable {
629     BlockCacheKey cacheKey;
630     int size;
631 
632     CachedItem(String blockName, int size) {
633       this.cacheKey = new BlockCacheKey(blockName, 0);
634       this.size = size;
635     }
636 
637     /** The size of this item reported to the block cache layer */
638     @Override
639     public long heapSize() {
640       return ClassSize.align(size);
641     }
642 
643     /** Size of the cache block holding this item. Used for verification. */
644     public long cacheBlockHeapSize() {
645       return CachedBlock.PER_BLOCK_OVERHEAD
646           + ClassSize.align(cacheKey.heapSize())
647           + ClassSize.align(size);
648     }
649 
650     @Override
651     public int getSerializedLength() {
652       return 0;
653     }
654 
655     @Override
656     public CacheableDeserializer<Cacheable> getDeserializer() {
657       return null;
658     }
659 
660     @Override
661     public void serialize(ByteBuffer destination) {
662     }
663     
664     @Override
665     public BlockType getBlockType() {
666       return BlockType.DATA;
667     }
668 
669   }
670 
671 }
672