package org.apache.hadoop.hbase.io.encoding;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
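/**
 * Tests that the data block encoding of a column family can be changed on a
 * live table: data written under one encoding must remain readable after
 * switching to another encoding, flipping the encode-on-disk flag, and
 * major-compacting the table.
 */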
@Category(LargeTests.class)
public class TestChangingEncoding {
  private static final Log LOG = LogFactory.getLog(TestChangingEncoding.class);
  static final String CF = "EncodingTestCF";
  static final byte[] CF_BYTES = Bytes.toBytes(CF);

  private static final int NUM_ROWS_PER_BATCH = 100;
  private static final int NUM_COLS_PER_ROW = 20;

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final Configuration conf = TEST_UTIL.getConfiguration();

  private static final int TIMEOUT_MS = 600000;

  private HBaseAdmin admin;
  private HColumnDescriptor hcd;

  private String tableName;
  private static final List<DataBlockEncoding> ENCODINGS_TO_ITERATE =
      createEncodingsToIterate();

  private static final List<DataBlockEncoding> createEncodingsToIterate() {
    List<DataBlockEncoding> encodings = new ArrayList<DataBlockEncoding>(
        Arrays.asList(DataBlockEncoding.values()));
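    // Add NONE once more at the end, so the final transition switches the
    // encoding off again and previously encoded data is read back unencoded.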
    encodings.add(DataBlockEncoding.NONE);
    return Collections.unmodifiableList(encodings);
  }

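  /** Number of batches of test data written to the table so far. */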
  private int numBatchesWritten;

  private void prepareTest(String testId) throws IOException {
    tableName = "test_table_" + testId;
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(tableName));
    hcd = new HColumnDescriptor(CF);
    htd.addFamily(hcd);
    admin.createTable(htd);
    numBatchesWritten = 0;
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
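    // Use a small memstore flush size (1 MB) so that test data is flushed to
    // new store files frequently as the encoding settings change.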
    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
    TEST_UTIL.startMiniCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setUp() throws Exception {
    admin = new HBaseAdmin(conf);
  }

  @After
  public void tearDown() throws IOException {
    admin.close();
  }

  private static byte[] getRowKey(int batchId, int i) {
    return Bytes.toBytes("batch" + batchId + "_row" + i);
  }

  private static byte[] getQualifier(int j) {
    return Bytes.toBytes("col" + j);
  }

  private static byte[] getValue(int batchId, int i, int j) {
    return Bytes.toBytes("value_for_" + Bytes.toString(getRowKey(batchId, i))
        + "_col" + j);
  }

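  /**
   * Writes one batch of NUM_ROWS_PER_BATCH rows with NUM_COLS_PER_ROW columns
   * each, skipping the WAL.
   */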
  static void writeTestDataBatch(Configuration conf, String tableName,
      int batchId) throws Exception {
    LOG.debug("Writing test data batch " + batchId);
    HTable table = new HTable(conf, tableName);
    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
      Put put = new Put(getRowKey(batchId, i));
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        put.add(CF_BYTES, getQualifier(j), getValue(batchId, i, j));
      }
      put.setDurability(Durability.SKIP_WAL);
      table.put(put);
    }
    table.close();
  }

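  /** Verifies that every cell of the given batch reads back with the expected value. */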
  static void verifyTestDataBatch(Configuration conf, String tableName,
      int batchId) throws Exception {
    LOG.debug("Verifying test data batch " + batchId);
    HTable table = new HTable(conf, tableName);
    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
      Get get = new Get(getRowKey(batchId, i));
      Result result = table.get(get);
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        KeyValue kv = result.getColumnLatest(CF_BYTES, getQualifier(j));
        assertEquals(Bytes.toStringBinary(getValue(batchId, i, j)),
            Bytes.toStringBinary(kv.getValue()));
      }
    }
    table.close();
  }

  private void writeSomeNewData() throws Exception {
    writeTestDataBatch(conf, tableName, numBatchesWritten);
    ++numBatchesWritten;
  }

  private void verifyAllData() throws Exception {
    for (int i = 0; i < numBatchesWritten; ++i) {
      verifyTestDataBatch(conf, tableName, i);
    }
  }

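  /**
   * Applies the given encoding and encode-on-disk flag to the test column
   * family, disabling and re-enabling the table to pick up the change.
   */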
  private void setEncodingConf(DataBlockEncoding encoding,
      boolean encodeOnDisk) throws IOException {
    LOG.debug("Setting CF encoding to " + encoding + " (ordinal="
        + encoding.ordinal() + "), encodeOnDisk=" + encodeOnDisk);
    admin.disableTable(tableName);
    hcd.setDataBlockEncoding(encoding);
    hcd.setEncodeOnDisk(encodeOnDisk);
    admin.modifyColumn(tableName, hcd);
    admin.enableTable(tableName);
  }

  @Test(timeout=TIMEOUT_MS)
  public void testChangingEncoding() throws Exception {
    prepareTest("ChangingEncoding");
    for (boolean encodeOnDisk : new boolean[]{false, true}) {
      for (DataBlockEncoding encoding : ENCODINGS_TO_ITERATE) {
        LOG.info("encoding=" + encoding + ", encodeOnDisk=" + encodeOnDisk);
        setEncodingConf(encoding, encodeOnDisk);
        writeSomeNewData();
        verifyAllData();
      }
    }
  }

  @Test(timeout=TIMEOUT_MS)
  public void testChangingEncodingWithCompaction() throws Exception {
    prepareTest("ChangingEncodingWithCompaction");
    for (boolean encodeOnDisk : new boolean[]{false, true}) {
      for (DataBlockEncoding encoding : ENCODINGS_TO_ITERATE) {
        setEncodingConf(encoding, encodeOnDisk);
        writeSomeNewData();
        verifyAllData();
        compactAndWait();
        verifyAllData();
      }
    }
  }

  @Test(timeout=TIMEOUT_MS)
  public void testFlippingEncodeOnDisk() throws Exception {
    prepareTest("FlippingEncodeOnDisk");
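    // Flip the encode-on-disk flag repeatedly for an unencoded column family
    // and for one using FAST_DIFF, compacting after every change.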
    DataBlockEncoding[] encodings = new DataBlockEncoding[] {
        DataBlockEncoding.NONE, DataBlockEncoding.FAST_DIFF };
    for (DataBlockEncoding encoding : encodings) {
      boolean[] flagValues;
      if (encoding == DataBlockEncoding.NONE) {
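        // The encode-on-disk flag is irrelevant when no encoding is used,
        // so only the default value is covered here.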
        flagValues =
            new boolean[] { HColumnDescriptor.DEFAULT_ENCODE_ON_DISK };
      } else {
        flagValues = new boolean[] { false, true, false, true };
      }
      for (boolean encodeOnDisk : flagValues) {
        setEncodingConf(encoding, encodeOnDisk);
        writeSomeNewData();
        verifyAllData();
        compactAndWait();
        verifyAllData();
      }
    }
  }

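  /** Requests a major compaction and blocks until the compaction queue is empty. */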
  private void compactAndWait() throws IOException, InterruptedException {
    LOG.debug("Compacting table " + tableName);
    HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);
    admin.majorCompact(tableName);

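    // Wait up to 500 ms for the compaction request to show up in the queue.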
    final long maxWaitTime = System.currentTimeMillis() + 500;
    boolean cont;
    do {
      cont = rs.compactSplitThread.getCompactionQueueSize() == 0;
      Threads.sleep(1);
    } while (cont && System.currentTimeMillis() < maxWaitTime);

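    // Then wait for the compaction queue to drain completely.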
    while (rs.compactSplitThread.getCompactionQueueSize() > 0) {
      Threads.sleep(1);
    }
    LOG.debug("Compaction queue size reached 0, continuing");
  }

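  /**
   * Applies 20 pseudo-random combinations of encoding and encode-on-disk
   * settings (fixed seed, so the sequence is reproducible) and verifies all
   * previously written data after each change.
   */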
  @Test
  public void testCrazyRandomChanges() throws Exception {
    prepareTest("RandomChanges");
    Random rand = new Random(2934298742974297L);
    for (int i = 0; i < 20; ++i) {
      int encodingOrdinal = rand.nextInt(DataBlockEncoding.values().length);
      DataBlockEncoding encoding = DataBlockEncoding.values()[encodingOrdinal];
      setEncodingConf(encoding, rand.nextBoolean());
      writeSomeNewData();
      verifyAllData();
    }
  }

}