View Javadoc

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
17  package org.apache.hadoop.hbase.io.hfile;
18  
19  import static org.junit.Assert.assertEquals;
20  import static org.junit.Assert.assertTrue;
21  
22  import java.io.IOException;
23  import java.nio.ByteBuffer;
24  import java.util.ArrayList;
25  import java.util.Collection;
26  import java.util.List;
27  
28  import org.apache.hadoop.hbase.HConstants;
29  import org.apache.hadoop.hbase.SmallTests;
30  import org.apache.hadoop.hbase.io.HeapSize;
31  import org.apache.hadoop.hbase.io.compress.Compression;
32  import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
33  import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
34  import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
35  import org.apache.hadoop.hbase.util.ChecksumType;
36  import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
37  import org.junit.Test;
38  import org.junit.experimental.categories.Category;
39  import org.junit.runner.RunWith;
40  import org.junit.runners.Parameterized;
41  import org.junit.runners.Parameterized.Parameters;
42  
43  @RunWith(Parameterized.class)
44  @Category(SmallTests.class)
45  public class TestHFileDataBlockEncoder {
46    private HFileDataBlockEncoderImpl blockEncoder;
47    private RedundantKVGenerator generator = new RedundantKVGenerator();
48    private boolean includesMemstoreTS;
49  
50    /**
51     * Create test for given data block encoding configuration.
52     * @param blockEncoder What kind of encoding policy will be used.
53     */
54    public TestHFileDataBlockEncoder(HFileDataBlockEncoderImpl blockEncoder,
55        boolean includesMemstoreTS) {
56      this.blockEncoder = blockEncoder;
57      this.includesMemstoreTS = includesMemstoreTS;
58      System.err.println("Encoding: " + blockEncoder.getDataBlockEncoding()
59          + ", includesMemstoreTS: " + includesMemstoreTS);
60    }
61  
62    /**
63     * Test putting and taking out blocks into cache with different
64     * encoding options.
65     * @throws IOException 
66     */
67    @Test
68    public void testEncodingWithCache() throws IOException {
69      HFileBlock block = getSampleHFileBlock();
70      LruBlockCache blockCache =
71          new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
72      HFileBlock cacheBlock = createBlockOnDisk(block);
73      BlockCacheKey cacheKey = new BlockCacheKey("test", 0);
74      blockCache.cacheBlock(cacheKey, cacheBlock);
75  
76      HeapSize heapSize = blockCache.getBlock(cacheKey, false, false);
77      assertTrue(heapSize instanceof HFileBlock);
78  
79      HFileBlock returnedBlock = (HFileBlock) heapSize;;
80  
81      if (blockEncoder.getDataBlockEncoding() ==
82          DataBlockEncoding.NONE) {
83        assertEquals(block.getBufferWithHeader(),
84            returnedBlock.getBufferWithHeader());
85      } else {
86        if (BlockType.ENCODED_DATA != returnedBlock.getBlockType()) {
87          System.out.println(blockEncoder);
88        }
89        assertEquals(BlockType.ENCODED_DATA, returnedBlock.getBlockType());
90      }
91    }
92  
93    /** Test for HBASE-5746. */
94    @Test
95    public void testHeaderSizeInCacheWithoutChecksum() throws Exception {
96      int headerSize = HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
97      // Create some KVs and create the block with old-style header.
98      ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(
99          generator.generateTestKeyValues(60), includesMemstoreTS);
100     int size = keyValues.limit();
101     ByteBuffer buf = ByteBuffer.allocate(size + headerSize);
102     buf.position(headerSize);
103     keyValues.rewind();
104     buf.put(keyValues);
105     HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf,
106         HFileBlock.FILL_HEADER, 0, includesMemstoreTS,
107         HFileBlock.MINOR_VERSION_NO_CHECKSUM, 0, ChecksumType.NULL.getCode(), 0);
108     HFileBlock cacheBlock = createBlockOnDisk(block);
109     assertEquals(headerSize, cacheBlock.getDummyHeaderForVersion().length);
110   }
111 
112   private HFileBlock createBlockOnDisk(HFileBlock block) throws IOException {
113     int size;
114     HFileBlockEncodingContext context = new HFileBlockDefaultEncodingContext(
115         Compression.Algorithm.NONE, blockEncoder.getDataBlockEncoding(),
116         HConstants.HFILEBLOCK_DUMMY_HEADER);
117     context.setDummyHeader(block.getDummyHeaderForVersion());
118     blockEncoder.beforeWriteToDisk(block.getBufferWithoutHeader(),
119             includesMemstoreTS, context, block.getBlockType());
120     byte[] encodedBytes = context.getUncompressedBytesWithHeader();
121     size = encodedBytes.length - block.getDummyHeaderForVersion().length;
122     return new HFileBlock(context.getBlockType(), size, size, -1,
123             ByteBuffer.wrap(encodedBytes), HFileBlock.FILL_HEADER, 0, includesMemstoreTS,
124             block.getMinorVersion(), block.getBytesPerChecksum(), block.getChecksumType(),
125             block.getOnDiskDataSizeWithHeader());
126   }
127 
128   /**
129    * Test writing to disk.
130    * @throws IOException
131    */
132   @Test
133   public void testEncodingWritePath() throws IOException {
134     // usually we have just block without headers, but don't complicate that
135     HFileBlock block = getSampleHFileBlock();
136     HFileBlock blockOnDisk = createBlockOnDisk(block);
137 
138     if (blockEncoder.getDataBlockEncoding() !=
139         DataBlockEncoding.NONE) {
140       assertEquals(BlockType.ENCODED_DATA, blockOnDisk.getBlockType());
141       assertEquals(blockEncoder.getDataBlockEncoding().getId(),
142           blockOnDisk.getDataBlockEncodingId());
143     } else {
144       assertEquals(BlockType.DATA, blockOnDisk.getBlockType());
145     }
146   }
147 
148   private HFileBlock getSampleHFileBlock() {
149     ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(
150         generator.generateTestKeyValues(60), includesMemstoreTS);
151     int size = keyValues.limit();
152     ByteBuffer buf = ByteBuffer.allocate(size + HConstants.HFILEBLOCK_HEADER_SIZE);
153     buf.position(HConstants.HFILEBLOCK_HEADER_SIZE);
154     keyValues.rewind();
155     buf.put(keyValues);
156     HFileBlock b = new HFileBlock(BlockType.DATA, size, size, -1, buf,
157         HFileBlock.FILL_HEADER, 0, includesMemstoreTS, 
158         HFileReaderV2.MAX_MINOR_VERSION, 0, ChecksumType.NULL.getCode(), 0);
159     return b;
160   }
161 
162   /**
163    * @return All possible data block encoding configurations
164    */
165   @Parameters
166   public static Collection<Object[]> getAllConfigurations() {
167     List<Object[]> configurations =
168         new ArrayList<Object[]>();
169 
170     for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
171       for (boolean includesMemstoreTS : new boolean[] {false, true}) {
172         configurations.add(new Object[] {
173             new HFileDataBlockEncoderImpl(diskAlgo),
174             new Boolean(includesMemstoreTS)});
175       }
176     }
177 
178     return configurations;
179   }
180 }