/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
21 package org.apache.hadoop.hbase.filter;
22
23 import java.io.DataInput;
24 import java.io.DataOutput;
25 import java.io.IOException;
26 import java.util.Random;
27
28 import org.apache.hadoop.hbase.KeyValue;
29
30
31
32
33
34 public class RandomRowFilter extends FilterBase {
35 protected static final Random random = new Random();
36
37 protected float chance;
38 protected boolean filterOutRow;
39
40
41
42
43 public RandomRowFilter() {
44 }
45
46
47
48
49
50
51 public RandomRowFilter(float chance) {
52 this.chance = chance;
53 }
54
55
56
57
58 public float getChance() {
59 return chance;
60 }
61
62
63
64
65
66
67 public void setChance(float chance) {
68 this.chance = chance;
69 }
70
71 @Override
72 public boolean filterAllRemaining() {
73 return false;
74 }
75
76 @Override
77 public ReturnCode filterKeyValue(KeyValue v) {
78 if (filterOutRow) {
79 return ReturnCode.NEXT_ROW;
80 }
81 return ReturnCode.INCLUDE;
82 }
83
84 @Override
85 public boolean filterRow() {
86 return filterOutRow;
87 }
88
89 @Override
90 public boolean filterRowKey(byte[] buffer, int offset, int length) {
91 if (chance < 0) {
92
93 filterOutRow = true;
94 } else if (chance > 1) {
95
96 filterOutRow = false;
97 } else {
98
99 filterOutRow = !(random.nextFloat() < chance);
100 }
101 return filterOutRow;
102 }
103
104 @Override
105 public void reset() {
106 filterOutRow = false;
107 }
108
109 @Override
110 public void readFields(DataInput in) throws IOException {
111 chance = in.readFloat();
112 }
113
114 @Override
115 public void write(DataOutput out) throws IOException {
116 out.writeFloat(chance);
117 }
118 }