/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

/**
 * Tests the scanner selection optimization: store files whose key ranges are
 * entirely excluded by the scan's start and stop rows are never read.
 */
@RunWith(Parameterized.class)
@Category(SmallTests.class)
public class TestScannerSelectionUsingKeyRange {
  private static final Log LOG = LogFactory.getLog(TestScannerSelectionUsingKeyRange.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final String TABLE = "myTable";
  private static final String FAMILY = "myCF";
  private static final byte[] FAMILY_BYTES = Bytes.toBytes(FAMILY);
  private static final int NUM_ROWS = 8;
  private static final int NUM_COLS_PER_ROW = 5;
  private static final int NUM_FILES = 2;
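  /** Maps each bloom filter type under test to the expected count passed to the constructor. */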
  private static final Map<Object, Integer> TYPE_COUNT = new HashMap<Object, Integer>(3);
  static {
    TYPE_COUNT.put(BloomType.ROWCOL, 2);
    TYPE_COUNT.put(BloomType.ROW, 2);
    TYPE_COUNT.put(BloomType.NONE, 2);
  }

  private BloomType bloomType;
  private int expectedCount;

  @Parameters
  public static Collection<Object[]> parameters() {
    List<Object[]> params = new ArrayList<Object[]>();
    for (Map.Entry<Object, Integer> entry : TYPE_COUNT.entrySet()) {
      params.add(new Object[] { entry.getKey(), entry.getValue() });
    }
    return params;
  }

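  // JUnit supplies each run's arguments from parameters() above: the bloom
  // filter type under test and the expected count associated with it.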
  public TestScannerSelectionUsingKeyRange(Object bloomType, Object expectedCount) {
    this.bloomType = (BloomType) bloomType;
    this.expectedCount = (Integer) expectedCount;
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.cleanupTestDir();
  }

  @Test
  public void testScannerSelection() throws IOException {
    Configuration conf = TEST_UTIL.getConfiguration();
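    // Raise the compaction threshold so the flushes below never trigger a
    // compaction and the region keeps NUM_FILES separate store files.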
    conf.setInt("hbase.hstore.compactionThreshold", 10000);
    HColumnDescriptor hcd = new HColumnDescriptor(FAMILY_BYTES).setBlockCacheEnabled(true)
        .setBloomFilterType(bloomType);
    HTableDescriptor htd = new HTableDescriptor(TABLE);
    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(Bytes.toBytes(TABLE));
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.getClusterTestDir(), conf, htd);

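    // Write NUM_FILES batches of rows, flushing after each batch so every
    // batch ends up in its own store file.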
    for (int iFile = 0; iFile < NUM_FILES; ++iFile) {
      for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
        Put put = new Put(Bytes.toBytes("row" + iRow));
        for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
          put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
              Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
        }
        region.put(put);
      }
      region.flushcache();
    }

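    // Scan a key range ("aaa" to "aaz") that sorts before every row written
    // above ("row0".."row7"), so no store file overlaps the scan.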
    Scan scan = new Scan(Bytes.toBytes("aaa"), Bytes.toBytes("aaz"));
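    // Start from an empty block cache and a fresh metrics snapshot so the
    // checks below reflect only this scan.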
    CacheConfig cacheConf = new CacheConfig(conf);
    LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
    cache.clearCache();
    Map<String, Long> metricsBefore = SchemaMetrics.getMetricsSnapshot();
    SchemaMetrics.validateMetricChanges(metricsBefore);
    InternalScanner scanner = region.getScanner(scan);
    List<KeyValue> results = new ArrayList<KeyValue>();
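    // Drain the scanner; the key range matches no rows, so nothing is
    // returned and no store file should be touched.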
    while (scanner.next(results)) {
    }
    scanner.close();
    assertEquals(0, results.size());
    Set<String> accessedFiles = cache.getCachedFileNamesForTest();
    assertEquals(0, accessedFiles.size());
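    // Confirm via schema metrics that the scan read zero data blocks.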
    Map<String, Long> diffMetrics = SchemaMetrics.diffMetrics(metricsBefore,
        SchemaMetrics.getMetricsSnapshot());
    SchemaMetrics schemaMetrics = SchemaMetrics.getInstance(TABLE, FAMILY);
    long dataBlockRead = SchemaMetrics.getLong(diffMetrics,
        schemaMetrics.getBlockMetricName(BlockCategory.DATA, false, BlockMetricType.READ_COUNT));
    assertEquals(0, dataBlockRead);
    region.close();
  }
}