/*
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.io;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.KeyValueSkipListSet;
import org.apache.hadoop.hbase.regionserver.MemStore;
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaConfigured;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.experimental.categories.Category;

/**
 * Tests that the heap sizes reported by HeapSize implementations match the
 * sizes estimated by ClassSize.
 */
@Category(SmallTests.class)
public class TestHeapSize extends TestCase {
  static final Log LOG = LogFactory.getLog(TestHeapSize.class);
  // List of classes implementing HeapSize
  // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey
  // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey

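  // A minimal sketch (hypothetical class, not part of HBase) of how a
  // HeapSize implementor is typically structured: a hard-coded fixed
  // overhead assembled from ClassSize constants, plus the deep size of any
  // owned payload.  It is not used by the tests below and is kept only for
  // illustration.
  @SuppressWarnings("unused")
  private static class ExampleHeapSized implements HeapSize {
    // Object header, one reference to the payload array, one long field.
    private static final long FIXED_OVERHEAD = ClassSize.align(
        ClassSize.OBJECT + ClassSize.REFERENCE + Bytes.SIZEOF_LONG);

    private final byte[] payload;
    private final long timestamp;

    ExampleHeapSized(byte[] payload, long timestamp) {
      this.payload = payload;
      this.timestamp = timestamp;
    }

    @Override
    public long heapSize() {
      // Fixed part plus the aligned size of the owned byte[].
      return FIXED_OVERHEAD + ClassSize.align(ClassSize.ARRAY + payload.length);
    }
  }
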
  /**
   * Test our hard-coded sizing of native Java objects.
   */
  public void testNativeSizes() throws IOException {
    @SuppressWarnings("rawtypes")
    Class cl = null;
    long expected = 0L;
    long actual = 0L;

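    // Each block below compares the reflection-based estimate from
    // ClassSize.estimateBase(cl, false) against the corresponding hard-coded
    // ClassSize constant.  On a mismatch, the estimate is recomputed with the
    // second (debug) argument set to true so the details are logged before
    // the assertion fails.
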
    // ArrayList
    cl = ArrayList.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ARRAYLIST;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ByteBuffer
    cl = ByteBuffer.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.BYTE_BUFFER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Integer
    cl = Integer.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.INTEGER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Map.Entry
    // Interface is public, all others are not.  Hard to size via ClassSize
//    cl = Map.Entry.class;
//    expected = ClassSize.estimateBase(cl, false);
//    actual = ClassSize.MAP_ENTRY;
//    if(expected != actual) {
//      ClassSize.estimateBase(cl, true);
//      assertEquals(expected, actual);
//    }

    // Object
    cl = Object.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.OBJECT;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // TreeMap
    cl = TreeMap.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.TREEMAP;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // String
    cl = String.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.STRING;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ConcurrentHashMap
    cl = ConcurrentHashMap.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.CONCURRENT_HASHMAP;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ConcurrentSkipListMap
    cl = ConcurrentSkipListMap.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.CONCURRENT_SKIPLISTMAP;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ReentrantReadWriteLock
    cl = ReentrantReadWriteLock.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.REENTRANT_LOCK;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // AtomicLong
    cl = AtomicLong.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ATOMIC_LONG;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // AtomicInteger
    cl = AtomicInteger.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ATOMIC_INTEGER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // AtomicBoolean
    cl = AtomicBoolean.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ATOMIC_BOOLEAN;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // CopyOnWriteArraySet
    cl = CopyOnWriteArraySet.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.COPYONWRITE_ARRAYSET;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // CopyOnWriteArrayList
    cl = CopyOnWriteArrayList.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.COPYONWRITE_ARRAYLIST;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // TimeRangeTracker
    cl = TimeRangeTracker.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.TIMERANGE_TRACKER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // KeyValueSkipListSet
    cl = KeyValueSkipListSet.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.KEYVALUE_SKIPLIST_SET;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }
  }

  /**
   * Tests the classes that implement HeapSize and are part of 0.20.
   * Some are not tested here, for example BlockIndex, which is tested in
   * TestHFile since it is a non-public class.
   * @throws IOException
   */
  public void testSizes() throws IOException {
    @SuppressWarnings("rawtypes")
    Class cl = null;
    long expected = 0L;
    long actual = 0L;

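    // Same pattern as testNativeSizes(), except that the actual values come
    // from heapSize() on freshly constructed instances or from each class's
    // published fixed-overhead constant.
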
    // KeyValue
    cl = KeyValue.class;
    expected = ClassSize.estimateBase(cl, false);
    KeyValue kv = new KeyValue();
    actual = kv.heapSize();
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Put
    cl = Put.class;
    expected = ClassSize.estimateBase(cl, false);
    // The actual TreeMap is not included in the above calculation
    expected += ClassSize.TREEMAP;
    Put put = new Put(Bytes.toBytes(""));
    actual = put.heapSize();
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // LruBlockCache Overhead
    cl = LruBlockCache.class;
    actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // CachedBlock Fixed Overhead
    // We really need "deep" sizing but ClassSize does not do this.
    // Perhaps we should do all these more in this style....
    cl = CachedBlock.class;
    actual = CachedBlock.PER_BLOCK_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    expected += ClassSize.estimateBase(String.class, false);
    expected += ClassSize.estimateBase(ByteBuffer.class, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      ClassSize.estimateBase(String.class, true);
      ClassSize.estimateBase(ByteBuffer.class, true);
      assertEquals(expected, actual);
    }

    // MemStore Overhead
    cl = MemStore.class;
    actual = MemStore.FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

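    // The deep overhead counts the KeyValueSkipListSet, the backing
    // ConcurrentSkipListMap, and the TimeRangeTracker twice: once for the
    // active set and once for the snapshot.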
    // MemStore Deep Overhead
    actual = MemStore.DEEP_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    expected += ClassSize.estimateBase(AtomicLong.class, false);
    expected += (2 * ClassSize.estimateBase(KeyValueSkipListSet.class, false));
    expected += (2 * ClassSize.estimateBase(ConcurrentSkipListMap.class, false));
    expected += (2 * ClassSize.estimateBase(TimeRangeTracker.class, false));
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      ClassSize.estimateBase(AtomicLong.class, true);
      ClassSize.estimateBase(KeyValueSkipListSet.class, true);
      ClassSize.estimateBase(KeyValueSkipListSet.class, true);
      ClassSize.estimateBase(ConcurrentSkipListMap.class, true);
      ClassSize.estimateBase(ConcurrentSkipListMap.class, true);
      ClassSize.estimateBase(TimeRangeTracker.class, true);
      ClassSize.estimateBase(TimeRangeTracker.class, true);
      assertEquals(expected, actual);
    }

    // SchemaConfigured
    LOG.debug("Heap size for: " + SchemaConfigured.class.getName());
    SchemaConfigured sc = new SchemaConfigured(null, "myTable", "myCF");
    assertEquals(ClassSize.estimateBase(SchemaConfigured.class, true),
        sc.heapSize());

    // Store Overhead
    cl = Store.class;
    actual = Store.FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Region Overhead
    cl = HRegion.class;
    actual = HRegion.FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Block cache key overhead
    cl = BlockCacheKey.class;
    // Passing a zero-length file name, because estimateBase does not handle
    // deep overhead.
    actual = new BlockCacheKey("", 0).heapSize();
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Currently NOT testing deep overheads of many of these classes.
    // Deep overheads cover the vast majority of the heap, but will not be 100%
    // accurate because it is unclear when we are referencing objects that are
    // already accounted for.  But we have satisfied our two core requirements:
    // sizing is quite accurate now, and these tests will fail if any of these
    // classes are modified without updating the overhead sizes.
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}