/**
 * Copyright 2007 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import junit.framework.TestCase;

/**
 * Tests for the page filter
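 *
 * <p>Usage sketch, for context only (not exercised by this test; assumes the
 * client-side Scan API of this release):
 * <pre>
 *   Scan scan = new Scan();
 *   scan.setFilter(new PageFilter(10)); // ask for at most 10 rows
 * </pre>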
 */
public class TestPageFilter extends TestCase {
  static final int ROW_LIMIT = 3;

  /**
   * Test that the page filter stops accepting rows once the page size is reached.
   * @throws Exception
   */
  public void testPageSize() throws Exception {
    Filter f = new PageFilter(ROW_LIMIT);
    pageSizeTests(f);
  }

  /**
   * Test that the filter behaves the same after a serialization round trip.
   * @throws Exception
   */
  public void testSerialization() throws Exception {
    Filter f = new PageFilter(ROW_LIMIT);
    // Serialize the filter to bytes.
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(stream);
    f.write(out);
    out.close();
    byte[] buffer = stream.toByteArray();
    // Deserialize the bytes into a fresh filter instance.
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
    Filter newFilter = new PageFilter();
    newFilter.readFields(in);

    // Ensure serialization preserved the filter by running the full page size test.
    pageSizeTests(newFilter);
  }

  private void pageSizeTests(Filter f) throws Exception {
    testFiltersBeyondPageSize(f, ROW_LIMIT);
  }

  private void testFiltersBeyondPageSize(final Filter f, final int pageSize) {
    int count = 0;
    // Offer the filter twice as many rows as the page size; it should accept
    // exactly pageSize rows and then signal that the rest can be skipped.
    for (int i = 0; i < (pageSize * 2); i++) {
      boolean filterOut = f.filterRow();

      if (filterOut) {
        break;
      } else {
        count++;
      }

      // Once the page is full, the filter should tell us to skip all remaining rows.
      if (count == pageSize) {
        assertTrue(f.filterAllRemaining());
      } else {
        assertFalse(f.filterAllRemaining());
      }
    }
    assertEquals(pageSize, count);
  }
}