/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueTestUtil;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

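/**
 * Tests {@link ColumnCountGetFilter}, which limits a Get to the first N
 * columns of a row.
 */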
@Category(SmallTests.class)
public class TestColumnCountGetFilter {

  private static final HBaseTestingUtility TEST_UTIL =
      new HBaseTestingUtility();

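  /**
   * Writes a single row with many columns (far more than 100), then verifies
   * that a Get filtered by ColumnCountGetFilter(100) returns exactly 100
   * KeyValues.
   */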
  @Test
  public void testColumnCountGetFilter() throws IOException {
    String family = "Family";
    HTableDescriptor htd = new HTableDescriptor("testColumnCountGetFilter");
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
        TEST_UTIL.getConfiguration(), htd);
    try {
      String valueString = "ValueString";
      String row = "row-1";
      List<String> columns = generateRandomWords(10000, "column");
      Put p = new Put(Bytes.toBytes(row));
      p.setWriteToWAL(false);
      for (String column : columns) {
        KeyValue kv = KeyValueTestUtil.create(row, family, column, 0, valueString);
        p.add(kv);
      }
      region.put(p);

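      // ColumnCountGetFilter(100) truncates the row to its first 100
      // columns, so the scanner should return exactly 100 KeyValues.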
      Get get = new Get(Bytes.toBytes(row));
      Filter filter = new ColumnCountGetFilter(100);
      get.setFilter(filter);
      Scan scan = new Scan(get);
      InternalScanner scanner = region.getScanner(scan);
      List<KeyValue> results = new ArrayList<KeyValue>();
      scanner.next(results);
      assertEquals(100, results.size());
    } finally {
      region.close();
      region.getLog().closeAndDelete();
    }
  }

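  /**
   * Same scenario as above, but with the ColumnCountGetFilter wrapped in a
   * FilterList, to verify it behaves identically inside a composite filter.
   */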
  @Test
  public void testColumnCountGetFilterWithFilterList() throws IOException {
    String family = "Family";
    HTableDescriptor htd =
        new HTableDescriptor("testColumnCountGetFilterWithFilterList");
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.getDataTestDir(),
        TEST_UTIL.getConfiguration(), htd);
    try {
      String valueString = "ValueString";
      String row = "row-1";
      List<String> columns = generateRandomWords(10000, "column");
      Put p = new Put(Bytes.toBytes(row));
      p.setWriteToWAL(false);
      for (String column : columns) {
        KeyValue kv = KeyValueTestUtil.create(row, family, column, 0, valueString);
        p.add(kv);
      }
      region.put(p);

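      // Wrapping the filter in a FilterList (default MUST_PASS_ALL operator)
      // should not change the outcome: still exactly 100 KeyValues.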
      Get get = new Get(Bytes.toBytes(row));
      FilterList filterList = new FilterList();
      filterList.addFilter(new ColumnCountGetFilter(100));
      get.setFilter(filterList);
      Scan scan = new Scan(get);
      InternalScanner scanner = region.getScanner(scan);
      List<KeyValue> results = new ArrayList<KeyValue>();
      scanner.next(results);
      assertEquals(100, results.size());
    } finally {
      region.close();
      region.getLog().closeAndDelete();
    }
  }

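  /**
   * Generates up to {@code numberOfWords} random one- or two-letter
   * lowercase words, appending {@code suffix} when it is non-null.
   * Duplicates collapse in the Set, so the returned list may be shorter
   * than {@code numberOfWords}.
   */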
  List<String> generateRandomWords(int numberOfWords, String suffix) {
    Set<String> wordSet = new HashSet<String>();
    for (int i = 0; i < numberOfWords; i++) {
      int lengthOfWords = (int) (Math.random() * 2) + 1;
      char[] wordChar = new char[lengthOfWords];
      for (int j = 0; j < wordChar.length; j++) {
        wordChar[j] = (char) (Math.random() * 26 + 97);
      }
      String word;
      if (suffix == null) {
        word = new String(wordChar);
      } else {
        word = new String(wordChar) + suffix;
      }
      wordSet.add(word);
    }
    List<String> wordList = new ArrayList<String>(wordSet);
    return wordList;
  }

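  // JUnit rule that checks for leaked resources (such as threads left
  // running) before and after each test method.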
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
      new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}