/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.codec.prefixtree.builder;

import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerRowSearchResult;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SimpleByteRange;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

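/**
 * Parameterized test that builds a {@link Tokenizer} trie from each sorted
 * {@link TestTokenizerData} fixture, then verifies that the byte arrays can be
 * round-tripped and found again by searching the trie.
 */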
@RunWith(Parameterized.class)
public class TestTokenizer {

  @Parameters
  public static Collection<Object[]> parameters() {
    return new TestTokenizerData.InMemory().getAllAsObjectArray();
  }

  private List<byte[]> inputs;
  private Tokenizer builder;
  private List<byte[]> roundTripped;

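  /**
   * Feeds every input array into the Tokenizer in sorted order and reads the
   * arrays back out of the trie for the assertions below.
   */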
  public TestTokenizer(TestTokenizerData sortedByteArrays) {
    this.inputs = sortedByteArrays.getInputs();
    this.builder = new Tokenizer();
    for (byte[] array : inputs) {
      builder.addSorted(new SimpleByteRange(array));
    }
    this.roundTripped = builder.getArrays();
  }

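  /**
   * The arrays read back out of the trie should match the inputs exactly:
   * same count, same sort order, same contents.
   */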
  @Test
  public void testReaderRoundTrip() {
    Assert.assertEquals(inputs.size(), roundTripped.size());
    Assert.assertTrue(Bytes.isSorted(roundTripped));
    Assert.assertTrue(Bytes.equals(inputs, roundTripped));
  }

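  /**
   * Every input array should be findable in the trie, and the matching node
   * should reproduce the original bytes via getNewByteArray().
   */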
  @Test
  public void testSearching() {
    for (byte[] input : inputs) {
      TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
      builder.getNode(resultHolder, input, 0, input.length);
      TokenizerNode n = resultHolder.getMatchingNode();
      byte[] output = n.getNewByteArray();
      Assert.assertTrue(Bytes.equals(input, output));
    }
  }

}