/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.LruBlockCache;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.test.LoadTestKVGenerator;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Tests encoded seekers by loading and reading values.
 */
@Category(MediumTests.class)
@RunWith(Parameterized.class)
public class TestEncodedSeekers {

  private static final String TABLE_NAME = "encodedSeekersTable";
  private static final String CF_NAME = "encodedSeekersCF";
  private static final byte[] CF_BYTES = Bytes.toBytes(CF_NAME);
  private static final int MAX_VERSIONS = 5;

  private static final int BLOCK_SIZE = 64 * 1024;
  private static final int MIN_VALUE_SIZE = 30;
  private static final int MAX_VALUE_SIZE = 60;
  private static final int NUM_ROWS = 1003;
  private static final int NUM_COLS_PER_ROW = 20;
  private static final int NUM_HFILES = 4;
  private static final int NUM_ROWS_PER_FLUSH = NUM_ROWS / NUM_HFILES;

  private final HBaseTestingUtility testUtil = HBaseTestingUtility.createLocalHTU();
  private final DataBlockEncoding encoding;
  private final boolean encodeOnDisk;

  /** Enable when debugging */
  private static final boolean VERBOSE = false;

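  /** Runs the test for every {@link DataBlockEncoding}, with encodeOnDisk both off and on. */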
  @Parameters
  public static Collection<Object[]> parameters() {
    List<Object[]> paramList = new ArrayList<Object[]>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      for (boolean encodeOnDisk : new boolean[] { false, true }) {
        paramList.add(new Object[] { encoding, encodeOnDisk });
      }
    }
    return paramList;
  }

  public TestEncodedSeekers(DataBlockEncoding encoding, boolean encodeOnDisk) {
    this.encoding = encoding;
    this.encodeOnDisk = encodeOnDisk;
  }

  @Test
  public void testEncodedSeeker() throws IOException {
    System.err.println("Testing encoded seekers for encoding " + encoding);
    LruBlockCache cache =
        (LruBlockCache) new CacheConfig(testUtil.getConfiguration()).getBlockCache();
    cache.clearCache();
    // Need to disable the default row bloom filter for this test to pass.
    HColumnDescriptor hcd = new HColumnDescriptor(CF_NAME)
        .setMaxVersions(MAX_VERSIONS)
        .setDataBlockEncoding(encoding)
        .setBlocksize(BLOCK_SIZE)
        .setBloomFilterType(BloomType.NONE);
    HRegion region = testUtil.createTestRegion(TABLE_NAME, hcd);

    // Write the data, but leave some of it in the memstore.
    doPuts(region);

    // Verify correctness while the memstore still contains data.
    doGets(region);

    // Verify correctness again after compacting.
    region.compactStores();
    doGets(region);

    Map<DataBlockEncoding, Integer> encodingCounts = cache.getEncodingCountsForTest();

    // Ensure that compactions don't pollute the cache with unencoded blocks
    // in case of in-cache-only encoding.
    System.err.println("encodingCounts=" + encodingCounts);
    assertEquals(1, encodingCounts.size());
    DataBlockEncoding encodingInCache = encodingCounts.keySet().iterator().next();
    assertEquals(encoding, encodingInCache);
    assertTrue(encodingCounts.get(encodingInCache) > 0);
  }

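  /**
   * Loads {@link #NUM_ROWS} rows with {@link #NUM_COLS_PER_ROW} columns each, flushing the
   * region every {@link #NUM_ROWS_PER_FLUSH} rows so that some data remains in the memstore.
   */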
  private void doPuts(HRegion region) throws IOException {
    LoadTestKVGenerator dataGenerator = new LoadTestKVGenerator(MIN_VALUE_SIZE, MAX_VALUE_SIZE);
    for (int i = 0; i < NUM_ROWS; ++i) {
      byte[] key = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        Put put = new Put(key);
        byte[] col = Bytes.toBytes(String.valueOf(j));
        byte[] value = dataGenerator.generateRandomSizeValue(key, col);
        put.add(CF_BYTES, col, value);
        if (VERBOSE) {
          KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);
          System.err.println(Strings.padFront(i + "", ' ', 4) + " " + kvPut);
        }
        region.put(put);
      }
      if (i % NUM_ROWS_PER_FLUSH == 0) {
        region.flushcache();
      }
    }
  }

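  /**
   * Reads back every row/column written by {@link #doPuts(HRegion)} with a {@link Get} and
   * verifies each returned value against the deterministic {@link LoadTestKVGenerator} output.
   */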
  private void doGets(HRegion region) throws IOException {
    for (int i = 0; i < NUM_ROWS; ++i) {
      final byte[] rowKey = LoadTestKVGenerator.md5PrefixedKey(i).getBytes();
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        final String qualStr = String.valueOf(j);
        if (VERBOSE) {
          System.err.println("Reading row " + i + ", column " + j + " "
              + Bytes.toString(rowKey) + "/" + qualStr);
        }
        final byte[] qualBytes = Bytes.toBytes(qualStr);
        Get get = new Get(rowKey);
        get.addColumn(CF_BYTES, qualBytes);
        Result result = region.get(get);
        assertEquals(1, result.size());
        byte[] value = result.getValue(CF_BYTES, qualBytes);
        assertTrue(LoadTestKVGenerator.verify(value, rowKey, qualBytes));
      }
    }
  }

}