/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 package org.apache.hadoop.hbase.codec.prefixtree.builder;
20
21 import java.util.Collection;
22 import java.util.List;
23
24 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.Tokenizer;
25 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerNode;
26 import org.apache.hadoop.hbase.codec.prefixtree.encode.tokenize.TokenizerRowSearchResult;
27 import org.apache.hadoop.hbase.util.SimpleByteRange;
28 import org.apache.hadoop.hbase.util.Bytes;
29 import org.junit.Assert;
30 import org.junit.Test;
31 import org.junit.runner.RunWith;
32 import org.junit.runners.Parameterized;
33 import org.junit.runners.Parameterized.Parameters;
34
35 @RunWith(Parameterized.class)
36 public class TestTokenizer {
37
38 @Parameters
39 public static Collection<Object[]> parameters() {
40 return new TestTokenizerData.InMemory().getAllAsObjectArray();
41 }
42
43 private List<byte[]> inputs;
44 private Tokenizer builder;
45 private List<byte[]> roundTripped;
46
47 public TestTokenizer(TestTokenizerData sortedByteArrays) {
48 this.inputs = sortedByteArrays.getInputs();
49 this.builder = new Tokenizer();
50 for (byte[] array : inputs) {
51 builder.addSorted(new SimpleByteRange(array));
52 }
53 this.roundTripped = builder.getArrays();
54 }
55
56 @Test
57 public void testReaderRoundTrip() {
58 Assert.assertEquals(inputs.size(), roundTripped.size());
59 Assert.assertTrue(Bytes.isSorted(roundTripped));
60 Assert.assertTrue(Bytes.equals(inputs, roundTripped));
61 }
62
63 @Test
64 public void testSearching() {
65 for (byte[] input : inputs) {
66 TokenizerRowSearchResult resultHolder = new TokenizerRowSearchResult();
67 builder.getNode(resultHolder, input, 0, input.length);
68 TokenizerNode n = resultHolder.getMatchingNode();
69 byte[] output = n.getNewByteArray();
70 Assert.assertTrue(Bytes.equals(input, output));
71 }
72 }
73
74 }