/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.io;

import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CachedBlock;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.MemStore;
import org.apache.hadoop.hbase.util.ClassSize;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.assertEquals;

/**
 * Testing the sizing that HeapSize offers and comparing it to the size given
 * by ClassSize.
 */
@Category(SmallTests.class)
public class TestHeapSize {
  static final Log LOG = LogFactory.getLog(TestHeapSize.class);
  // List of classes implementing HeapSize:
  // BatchOperation, BatchUpdate, BlockIndex, Entry, Entry<K,V>, HStoreKey,
  // KeyValue, LruBlockCache, LruHashMap<K,V>, Put, HLogKey

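  /*
   * Illustrative sketch only (not part of the original test): the pattern the
   * checks below rely on.  A HeapSize implementation hard-codes a fixed
   * overhead built from ClassSize constants and returns it from heapSize(),
   * while ClassSize.estimateBase() independently derives the same shallow size
   * via reflection.  The class and field names here are hypothetical.
   */
  private static class ExampleHeapSize implements HeapSize {
    private Object someReference;   // one reference field
    private long someCounter;       // one long (8 byte) field

    // object header + one reference + one long, rounded up to JVM alignment
    static final long FIXED_OVERHEAD =
        ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE + 8);

    @Override
    public long heapSize() {
      return FIXED_OVERHEAD;
    }
  }
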
  @BeforeClass
  public static void beforeClass() throws Exception {
    // Print details about the JVM so we know what is different should the
    // tests below fail.
    RuntimeMXBean b = ManagementFactory.getRuntimeMXBean();
    LOG.info("name=" + b.getName());
    LOG.info("specname=" + b.getSpecName());
    LOG.info("specvendor=" + b.getSpecVendor());
    LOG.info("vmname=" + b.getVmName());
    LOG.info("vmversion=" + b.getVmVersion());
    LOG.info("vmvendor=" + b.getVmVendor());
    Map<String, String> p = b.getSystemProperties();
    LOG.info("properties=" + p);
  }

  /**
   * Test our hard-coded sizing of native Java objects.
   */
  @Test
  public void testNativeSizes() throws IOException {
    Class<?> cl;
    long expected;
    long actual;

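    // Each check below follows the same pattern: ClassSize.estimateBase(cl, false)
    // derives the shallow size of the class via reflection, and the corresponding
    // hard-coded ClassSize constant must match it.  On a mismatch we first call
    // estimateBase(cl, true) so details of the estimate are logged, and then fail
    // through assertEquals.
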
    // ArrayList
    cl = ArrayList.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ARRAYLIST;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ByteBuffer
    cl = ByteBuffer.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.BYTE_BUFFER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Integer
    cl = Integer.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.INTEGER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Map.Entry
    // The interface is public but the implementations are not, so it is hard
    // to size via ClassSize.
//    cl = Map.Entry.class;
//    expected = ClassSize.estimateBase(cl, false);
//    actual = ClassSize.MAP_ENTRY;
//    if (expected != actual) {
//      ClassSize.estimateBase(cl, true);
//      assertEquals(expected, actual);
//    }

    // Object
    cl = Object.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.OBJECT;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // TreeMap
    cl = TreeMap.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.TREEMAP;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // String
    cl = String.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.STRING;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ConcurrentHashMap
    cl = ConcurrentHashMap.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.CONCURRENT_HASHMAP;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ConcurrentSkipListMap
    cl = ConcurrentSkipListMap.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.CONCURRENT_SKIPLISTMAP;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // ReentrantReadWriteLock
    cl = ReentrantReadWriteLock.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.REENTRANT_LOCK;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // AtomicLong
    cl = AtomicLong.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ATOMIC_LONG;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // AtomicInteger
    cl = AtomicInteger.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ATOMIC_INTEGER;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // AtomicBoolean
    cl = AtomicBoolean.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.ATOMIC_BOOLEAN;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // CopyOnWriteArraySet
    cl = CopyOnWriteArraySet.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.COPYONWRITE_ARRAYSET;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // CopyOnWriteArrayList
    cl = CopyOnWriteArrayList.class;
    expected = ClassSize.estimateBase(cl, false);
    actual = ClassSize.COPYONWRITE_ARRAYLIST;
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }
  }

  /**
   * Testing the classes that implement HeapSize and are part of 0.20.
   * Some are not tested here, for example BlockIndex, which is tested in
   * TestHFile since it is a non-public class.
   * @throws IOException
   */
  @Test
  public void testSizes() throws IOException {
    Class<?> cl;
    long expected;
    long actual;

    // KeyValue
    cl = KeyValue.class;
    expected = ClassSize.estimateBase(cl, false);
    KeyValue kv = new KeyValue();
    actual = kv.heapSize();
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // LruBlockCache Overhead
    cl = LruBlockCache.class;
    actual = LruBlockCache.CACHE_FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // CachedBlock Fixed Overhead
    // We really need "deep" sizing but ClassSize does not do this.
    // Perhaps we should do all these more in this style....
    cl = CachedBlock.class;
    actual = CachedBlock.PER_BLOCK_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    expected += ClassSize.estimateBase(String.class, false);
    expected += ClassSize.estimateBase(ByteBuffer.class, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      ClassSize.estimateBase(String.class, true);
      ClassSize.estimateBase(ByteBuffer.class, true);
      assertEquals(expected, actual);
    }

    // MemStore Overhead
    cl = MemStore.class;
    actual = MemStore.FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // MemStore Deep Overhead
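    // DEEP_OVERHEAD should equal the shallow MemStore estimate plus the shallow
    // sizes of the objects a MemStore holds on to: the read/write lock, the
    // size counter, its two skip-list maps, and the copy-on-write collections.
    // That is exactly the sum built up in `expected` below.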
    actual = MemStore.DEEP_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    expected += ClassSize.estimateBase(ReentrantReadWriteLock.class, false);
    expected += ClassSize.estimateBase(AtomicLong.class, false);
    expected += ClassSize.estimateBase(ConcurrentSkipListMap.class, false);
    expected += ClassSize.estimateBase(ConcurrentSkipListMap.class, false);
    expected += ClassSize.estimateBase(CopyOnWriteArraySet.class, false);
    expected += ClassSize.estimateBase(CopyOnWriteArrayList.class, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      ClassSize.estimateBase(ReentrantReadWriteLock.class, true);
      ClassSize.estimateBase(AtomicLong.class, true);
      ClassSize.estimateBase(ConcurrentSkipListMap.class, true);
      ClassSize.estimateBase(CopyOnWriteArraySet.class, true);
      ClassSize.estimateBase(CopyOnWriteArrayList.class, true);
      assertEquals(expected, actual);
    }

    // Store Overhead
    cl = HStore.class;
    actual = HStore.FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Region Overhead
    cl = HRegion.class;
    actual = HRegion.FIXED_OVERHEAD;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Block cache key overhead
    cl = BlockCacheKey.class;
    // Passing a zero-length file name, because estimateBase does not handle
    // deep overhead.
    actual = new BlockCacheKey("", 0).heapSize();
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    // Currently NOT testing deep overheads of many of these classes.
    // Deep overheads cover the vast majority of the allocated objects, but will
    // not be 100% accurate because it is unclear when we are referencing objects
    // that are already accounted for.  But we have satisfied our two core
    // requirements: sizing is quite accurate now, and these tests will fail if
    // any of these classes are modified without updating the overhead sizes.
  }

  @Test
  public void testMutations() {
    Class<?> cl;
    long expected;
    long actual;

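    // Same pattern as the tests above: TimeRange is checked against the
    // hard-coded ClassSize.TIMERANGE, while Put and Delete are checked against
    // the MUTATION_OVERHEAD they expose plus an aligned, empty row array.
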
    cl = TimeRange.class;
    actual = ClassSize.TIMERANGE;
    expected = ClassSize.estimateBase(cl, false);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    byte[] row = new byte[] { 0 };
    cl = Put.class;
    actual = new Put(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
    expected = ClassSize.estimateBase(cl, false);
    // The actual TreeMap is not included in the above calculation.
    expected += ClassSize.align(ClassSize.TREEMAP);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }

    cl = Delete.class;
    actual = new Delete(row).MUTATION_OVERHEAD + ClassSize.align(ClassSize.ARRAY);
    expected = ClassSize.estimateBase(cl, false);
    // The actual TreeMap is not included in the above calculation.
    expected += ClassSize.align(ClassSize.TREEMAP);
    if (expected != actual) {
      ClassSize.estimateBase(cl, true);
      assertEquals(expected, actual);
    }
  }

}