/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics.BlockMetricType;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;

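/**
 * Verify that the per-column-family block metrics maintained by
 * {@link SchemaMetrics} report the expected number of data and index block
 * reads for a simple scan, both with and without data block encoding.
 */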
@SuppressWarnings("deprecation")
@Category(SmallTests.class)
public class TestBlocksScanned extends HBaseTestCase {
  private static byte [] FAMILY = Bytes.toBytes("family");
  private static byte [] COL = Bytes.toBytes("col");
  private static byte [] START_KEY = Bytes.toBytes("aaa");
  private static byte [] END_KEY = Bytes.toBytes("zzz");
  // Deliberately tiny so each flushed HFile is split across many blocks.
  private static int BLOCK_SIZE = 70;

  private static HBaseTestingUtility TEST_UTIL = null;
  @Override
  public void setUp() throws Exception {
    super.setUp();
    // Include the table name in metric keys so the per-table, per-CF
    // counters checked below can be looked up.
    SchemaMetrics.setUseTableNameInTest(true);
    TEST_UTIL = new HBaseTestingUtility();
  }

  @Test
  public void testBlocksScanned() throws Exception {
    byte [] tableName = Bytes.toBytes("TestBlocksScanned");
    HTableDescriptor table = new HTableDescriptor(tableName);

    table.addFamily(
        new HColumnDescriptor(FAMILY)
        .setMaxVersions(10)
        .setBlockCacheEnabled(true)
        .setBlocksize(BLOCK_SIZE)
        .setCompressionType(Compression.Algorithm.NONE)
        );
    _testBlocksScanned(table);
  }

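  // Same check as above, but with FAST_DIFF data block encoding enabled on
  // the column family.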
  @Test
  public void testBlocksScannedWithEncoding() throws Exception {
    byte [] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
    HTableDescriptor table = new HTableDescriptor(tableName);

    table.addFamily(
        new HColumnDescriptor(FAMILY)
        .setMaxVersions(10)
        .setBlockCacheEnabled(true)
        .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
        .setBlocksize(BLOCK_SIZE)
        .setCompressionType(Compression.Algorithm.NONE)
        );
    _testBlocksScanned(table);
  }

  private void _testBlocksScanned(HTableDescriptor table) throws Exception {
    HRegion r = createNewHRegion(table, START_KEY, END_KEY,
        TEST_UTIL.getConfiguration());
    addContent(r, FAMILY, COL);
    r.flushcache();

    // Get the per-CF metrics object, plus a snapshot of the current metric
    // values to diff against after the scan.
    SchemaMetrics schemaMetrics =
      SchemaMetrics.getInstance(Bytes.toString(table.getName()), Bytes.toString(FAMILY));
    Map<String, Long> schemaMetricSnapshot = SchemaMetrics.getMetricsSnapshot();

    // Do simple test of getting one row only first.
    Scan scan = new Scan(Bytes.toBytes("aaa"), Bytes.toBytes("aaz"));
    scan.addColumn(FAMILY, COL);
    scan.setMaxVersions(1);

    InternalScanner s = r.getScanner(scan);
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (s.next(results)) {
      // Keep pulling rows until the scanner is exhausted; next() appends to
      // results and returns false once nothing is left.
    }
    s.close();

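    // addContent populated one small cell per three-letter row key, so the
    // half-open range [aaa, aaz) should return the 25 rows aaa through aay.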
    int expectResultSize = 'z' - 'a';
    Assert.assertEquals(expectResultSize, results.size());

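    // Estimate how many KeyValues fit in one 70-byte block; with these small
    // cells the estimate should come out to exactly two.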
    int kvPerBlock = (int) Math.ceil(BLOCK_SIZE / (double) results.get(0).getLength());
    Assert.assertEquals(2, kvPerBlock);

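    // Scanning 25 KVs at two per block should read ceil(25 / 2) = 13 data
    // blocks; the test expects one index block read per data block read.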
    long expectDataBlockRead = (long) Math.ceil(expectResultSize / (double) kvPerBlock);
    long expectIndexBlockRead = expectDataBlockRead;

    verifyDataAndIndexBlockRead(schemaMetricSnapshot, schemaMetrics,
        expectDataBlockRead, expectIndexBlockRead);
  }

  private void verifyDataAndIndexBlockRead(Map<String, Long> previousMetricSnapshot,
      SchemaMetrics schemaMetrics, long expectedDataBlockRead, long expectedIndexBlockRead) {
    Map<String, Long> currentMetricsSnapshot = SchemaMetrics.getMetricsSnapshot();
    Map<String, Long> diffs =
      SchemaMetrics.diffMetrics(previousMetricSnapshot, currentMetricsSnapshot);

    // Pull the data and index block READ_COUNT deltas out of the metrics
    // diff; the false argument selects the non-compaction counters.
    long dataBlockRead = SchemaMetrics.getLong(diffs,
        schemaMetrics.getBlockMetricName(BlockCategory.DATA, false, BlockMetricType.READ_COUNT));
    long indexBlockRead = SchemaMetrics.getLong(diffs,
        schemaMetrics.getBlockMetricName(BlockCategory.INDEX, false, BlockMetricType.READ_COUNT));

    Assert.assertEquals(expectedDataBlockRead, dataBlockRead);
    Assert.assertEquals(expectedIndexBlockRead, indexBlockRead);
  }
}