/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
20 package org.apache.hadoop.hbase.filter;
21
22 import java.io.ByteArrayInputStream;
23 import java.io.ByteArrayOutputStream;
24 import java.io.DataInputStream;
25 import java.io.DataOutputStream;
26
27 import junit.framework.TestCase;
28 import org.apache.hadoop.hbase.SmallTests;
29 import org.junit.experimental.categories.Category;
30
31
32
33
34 @Category(SmallTests.class)
35 public class TestPageFilter extends TestCase {
36 static final int ROW_LIMIT = 3;
37
38
39
40
41
42 public void testPageSize() throws Exception {
43 Filter f = new PageFilter(ROW_LIMIT);
44 pageSizeTests(f);
45 }
46
47
48
49
50
51 public void testSerialization() throws Exception {
52 Filter f = new PageFilter(ROW_LIMIT);
53
54 ByteArrayOutputStream stream = new ByteArrayOutputStream();
55 DataOutputStream out = new DataOutputStream(stream);
56 f.write(out);
57 out.close();
58 byte[] buffer = stream.toByteArray();
59
60 DataInputStream in = new DataInputStream(new ByteArrayInputStream(buffer));
61 Filter newFilter = new PageFilter();
62 newFilter.readFields(in);
63
64
65 pageSizeTests(newFilter);
66 }
67
68 private void pageSizeTests(Filter f) throws Exception {
69 testFiltersBeyondPageSize(f, ROW_LIMIT);
70 }
71
72 private void testFiltersBeyondPageSize(final Filter f, final int pageSize) {
73 int count = 0;
74 for (int i = 0; i < (pageSize * 2); i++) {
75 boolean filterOut = f.filterRow();
76
77 if(filterOut) {
78 break;
79 } else {
80 count++;
81 }
82
83
84 if(count == pageSize) {
85 assertTrue(f.filterAllRemaining());
86 } else {
87 assertFalse(f.filterAllRemaining());
88 }
89
90 }
91 assertEquals(pageSize, count);
92 }
93
94 @org.junit.Rule
95 public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
96 new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
97 }
98