/**
 * Copyright 2007 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import junit.framework.TestCase;

import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category;
/**
 * Tests the inclusive stop row filter, which stops a scan at a given row
 * while still including that row in the results.
 */
@Category(SmallTests.class)
public class TestInclusiveStopFilter extends TestCase {
  private final byte [] STOP_ROW = Bytes.toBytes("stop_row");
  private final byte [] GOOD_ROW = Bytes.toBytes("good_row");
  private final byte [] PAST_STOP_ROW = Bytes.toBytes("zzzzzz");

  Filter mainFilter;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    mainFilter = new InclusiveStopFilter(STOP_ROW);
  }

  /**
   * Tests identification of the stop row
   * @throws Exception
   */
  public void testStopRowIdentification() throws Exception {
    stopRowTests(mainFilter);
  }

  /**
   * Tests that the filter survives a serialization round trip
   * @throws Exception
   */
  public void testSerialization() throws Exception {
    // Serialize mainFilter to bytes.
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(stream);
    mainFilter.write(out);
    out.close();
    byte[] buffer = stream.toByteArray();

    // Deserialize the bytes into a fresh filter.
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
    Filter newFilter = new InclusiveStopFilter();
    newFilter.readFields(in);

    // Ensure serialization preserved the filter by running the full test.
    stopRowTests(newFilter);
  }
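
  /*
   * A minimal sketch (not exercised by the tests above) of the same
   * Writable round trip factored into a reusable helper; the helper
   * name roundTrip is hypothetical, not part of the original suite.
   */
  private static Filter roundTrip(Filter filter) throws java.io.IOException {
    // Write the filter out to a byte buffer...
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(stream);
    filter.write(out);
    out.close();
    // ...and read it back into a fresh instance.
    Filter copy = new InclusiveStopFilter();
    copy.readFields(new DataInputStream(
      new ByteArrayInputStream(stream.toByteArray())));
    return copy;
  }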

  private void stopRowTests(Filter filter) throws Exception {
    // Rows before and at the stop row must pass; rows past it are filtered.
    assertFalse("Filtering on " + Bytes.toString(GOOD_ROW),
      filter.filterRowKey(GOOD_ROW, 0, GOOD_ROW.length));
    assertFalse("Filtering on " + Bytes.toString(STOP_ROW),
      filter.filterRowKey(STOP_ROW, 0, STOP_ROW.length));
    assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW),
      filter.filterRowKey(PAST_STOP_ROW, 0, PAST_STOP_ROW.length));

    // Having seen a row past the stop row, the filter reports it is done.
    assertTrue("FilterAllRemaining", filter.filterAllRemaining());
    assertFalse("FilterRow", filter.filterRow());

    assertFalse("Filtering on a null row key", filter.filterRowKey(null, 0, 0));
  }
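
  /*
   * For reference, client code would typically install the filter on a
   * Scan rather than call filterRowKey directly; a usage sketch, assuming
   * the standard client API:
   *
   *   Scan scan = new Scan();
   *   scan.setFilter(new InclusiveStopFilter(STOP_ROW));
   */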

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}