/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile.slab;

import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.io.hfile.CacheTestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
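/**
 * Exercises {@link SingleSizeCache} through {@link CacheTestUtils}: basic
 * cache/retrieve round-trips, multi-threaded access, hammering a single key,
 * eviction under load, and heap-size accounting.
 */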
@Category(MediumTests.class)
public class TestSingleSizeCache {
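  // Cache geometry: a 1,000,000-byte cache split into 100 blocks of
  // 10,000 bytes each, plus the thread and query counts used by the
  // multi-threaded tests below.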
  SingleSizeCache cache;
  final int CACHE_SIZE = 1000000;
  final int NUM_BLOCKS = 100;
  final int BLOCK_SIZE = CACHE_SIZE / NUM_BLOCKS;
  final int NUM_THREADS = 100;
  final int NUM_QUERIES = 10000;

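  // Every test runs against a fresh cache that is shut down afterwards. The
  // null constructor argument leaves the optional watcher hook unset, which
  // is sufficient for exercising the cache standalone.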
  @Before
  public void setup() {
    cache = new SingleSizeCache(BLOCK_SIZE, NUM_BLOCKS, null);
  }

  @After
  public void tearDown() {
    cache.shutdown();
  }

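  // Single-threaded cache/retrieve round-trips.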
  @Test
  public void testCacheSimple() throws Exception {
    CacheTestUtils.testCacheSimple(cache, BLOCK_SIZE, NUM_QUERIES);
  }

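  // Concurrent reads and writes from many threads.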
  @Test
  public void testCacheMultiThreaded() throws Exception {
    CacheTestUtils.testCacheMultiThreaded(cache, BLOCK_SIZE,
        NUM_THREADS, NUM_QUERIES, 0.80);
  }

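  // Many threads hammering the same key concurrently.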
  @Test
  public void testCacheMultiThreadedSingleKey() throws Exception {
    CacheTestUtils.hammerSingleKey(cache, BLOCK_SIZE, NUM_THREADS, NUM_QUERIES);
  }

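  // Many threads driving the cache hard enough to force evictions.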
  @Test
  public void testCacheMultiThreadedEviction() throws Exception {
    CacheTestUtils.hammerEviction(cache, BLOCK_SIZE, NUM_THREADS, NUM_QUERIES);
  }

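  // Heap-size accounting should track blocks entering and leaving the cache.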
  @Test
  public void testHeapSizeChanges() {
    CacheTestUtils.testHeapSizeChanges(cache, BLOCK_SIZE);
  }

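  // Standard HBase test rule that flags tests which leak resources (e.g. threads).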
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
      new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}