/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ResourceCheckerJUnitRule;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.*;

/**
 * Test {@link HFileScanner#reseekTo(byte[])}: unlike a plain seek, a reseek
 * may only move the scanner forward from its current position.
 */
@Category(SmallTests.class)
public class TestReseekTo {

  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @Test
  public void testReseekTo() throws Exception {

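    // Write an HFile of 1000 ascending integer keys, each mapped to the
    // value "Value<key>"; remember every pair so it can be verified below.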
    Path ncTFile = new Path(TEST_UTIL.getDataTestDir(), "basic.hfile");
    FSDataOutputStream fout = TEST_UTIL.getTestFileSystem().create(ncTFile);
    CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
    HFile.Writer writer = HFile.getWriterFactory(
        TEST_UTIL.getConfiguration(), cacheConf)
            .withOutputStream(fout)
            .withBlockSize(4000)
            .create();
    int numberOfKeys = 1000;

    String valueString = "Value";

    List<Integer> keyList = new ArrayList<Integer>();
    List<String> valueList = new ArrayList<String>();

    for (int key = 0; key < numberOfKeys; key++) {
      String value = valueString + key;
      keyList.add(key);
      valueList.add(value);
      writer.append(Bytes.toBytes(key), Bytes.toBytes(value));
    }
    writer.close();
    fout.close();

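    // Reopen the file for reading; the file info block must be loaded
    // before a scanner can be obtained.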
    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(),
        ncTFile, cacheConf);
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, true);

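    // seekTo(key) does a full seek from the block index, so every key can
    // be located regardless of the scanner's current position.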
    scanner.seekTo();
    for (int i = 0; i < keyList.size(); i++) {
      Integer key = keyList.get(i);
      String value = valueList.get(i);
      scanner.seekTo(Bytes.toBytes(key));
      assertEquals(value, scanner.getValueString());
    }
85  
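    // reseekTo(key) may only move forward from the current position, so the
    // keys are revisited in ascending order, sampling every tenth one.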
    scanner.seekTo();
    for (int i = 0; i < keyList.size(); i += 10) {
      Integer key = keyList.get(i);
      String value = valueList.get(i);
      scanner.reseekTo(Bytes.toBytes(key));
      assertEquals("i is " + i, value, scanner.getValueString());
    }

    reader.close();
  }

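  // ResourceCheckerJUnitRule fails the test if it leaks resources such as
  // threads or open file descriptors.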
  @Rule
  public ResourceCheckerJUnitRule cu = new ResourceCheckerJUnitRule();
}