/*
 * Copyright 2011 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.io.hfile;

import java.io.IOException;
import java.net.URL;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.util.Bytes;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.*;

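/**
 * Verifies that an existing, pre-generated version 1 HFile (checked in as a
 * test resource) can still be opened and fully scanned, and that its trailer
 * metadata matches the expected entry count, version, and compression codec.
 */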
@Category(SmallTests.class)
public class TestHFileReaderV1 {

  private static final HBaseTestingUtility TEST_UTIL =
    new HBaseTestingUtility();

  private Configuration conf;
  private FileSystem fs;
  private Map<String, Long> startingMetrics;

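  // Expected number of key/value entries in the stored version 1 HFile.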
  private static final int N = 1000;

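  // Snapshot schema metrics up front so tearDown() can validate the metric
  // changes made by this test against that baseline.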
  @Before
  public void setUp() throws IOException {
    startingMetrics = SchemaMetrics.getMetricsSnapshot();
    conf = TEST_UTIL.getConfiguration();
    fs = FileSystem.get(conf);
    SchemaMetrics.configureGlobally(conf);
  }

  @After
  public void tearDown() throws Exception {
    SchemaMetrics.validateMetricChanges(startingMetrics);
  }

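  /**
   * Opens the bundled version 1 HFile, checks its trailer fields, and scans
   * every entry with both seek+read and positional read (pread), verifying
   * the entry count and the total uncompressed data size.
   */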
  @Test
  public void testReadingExistingVersion1HFile() throws IOException {
    URL url = TestHFileReaderV1.class.getResource(
        "8e8ab58dcf39412da19833fcd8f687ac");
    Path existingHFilePath = new Path(url.getPath());
    HFile.Reader reader =
      HFile.createReader(fs, existingHFilePath, new CacheConfig(conf));
    reader.loadFileInfo();
    FixedFileTrailer trailer = reader.getTrailer();

    assertEquals(N, reader.getEntries());
    assertEquals(N, trailer.getEntryCount());
    assertEquals(1, trailer.getMajorVersion());
    assertEquals(Compression.Algorithm.GZ, trailer.getCompressionCodec());

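    // Scan the whole file twice: first with streaming (seek+read) access,
    // then with positional reads (pread).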
    for (boolean pread : new boolean[] { false, true }) {
      int totalDataSize = 0;
      int n = 0;

      HFileScanner scanner = reader.getScanner(false, pread);
      assertTrue(scanner.seekTo());
      do {
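        // Each entry contributes its key and value bytes plus the two
        // 4-byte length fields stored with every key/value pair.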
        totalDataSize += scanner.getKey().limit() + scanner.getValue().limit()
            + Bytes.SIZEOF_INT * 2;
        ++n;
      } while (scanner.next());

      // Add magic record sizes, one per data block.
      totalDataSize += 8 * trailer.getDataIndexCount();

      assertEquals(N, n);
      assertEquals(trailer.getTotalUncompressedBytes(), totalDataSize);
    }
    reader.close();
  }

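  // Rule that checks the test does not leak resources such as threads.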
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}