package org.apache.hadoop.hbase.io.encoding;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

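/**
 * Tests that table data stays readable while the column family's data block
 * encoding and encode-on-disk settings are changed, including across flushes
 * and major compactions.
 */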
@Category(LargeTests.class)
public class TestChangingEncoding {

  private static final Log LOG = LogFactory.getLog(TestChangingEncoding.class);

  static final String CF = "EncodingTestCF";
  static final byte[] CF_BYTES = Bytes.toBytes(CF);

  private static final int NUM_ROWS_PER_BATCH = 100;
  private static final int NUM_COLS_PER_ROW = 20;

  private static final HBaseTestingUtility TEST_UTIL =
      new HBaseTestingUtility();
  private static final Configuration conf = TEST_UTIL.getConfiguration();

  private static final int TIMEOUT_MS = 240000;

  private HBaseAdmin admin;
  private HColumnDescriptor hcd;

  private String tableName;
  private static final List<DataBlockEncoding> ENCODINGS_TO_ITERATE =
      createEncodingsToIterate();

  private static List<DataBlockEncoding> createEncodingsToIterate() {
    List<DataBlockEncoding> encodings = new ArrayList<DataBlockEncoding>(
        Arrays.asList(DataBlockEncoding.values()));
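    // Append NONE once more so every iteration ends by switching back to no
    // encoding, verifying that previously encoded data stays readable without it.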
    encodings.add(DataBlockEncoding.NONE);
    return Collections.unmodifiableList(encodings);
  }

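  /** Number of test data batches written to the table so far. */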
  private int numBatchesWritten;

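  /** Creates a fresh table with a single column family for the given test case. */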
  private void prepareTest(String testId) throws IOException {
    tableName = "test_table_" + testId;
    HTableDescriptor htd = new HTableDescriptor(tableName);
    hcd = new HColumnDescriptor(CF);
    htd.addFamily(hcd);
    admin.createTable(htd);
    numBatchesWritten = 0;
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
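    // Use a small memstore flush size so flushes happen more often during the
    // test and more store files are written under each encoding setting.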
    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
    TEST_UTIL.startMiniCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setUp() throws Exception {
    admin = new HBaseAdmin(conf);
  }

  @After
  public void tearDown() throws IOException {
    admin.close();
  }

  private static byte[] getRowKey(int batchId, int i) {
    return Bytes.toBytes("batch" + batchId + "_row" + i);
  }

  private static byte[] getQualifier(int j) {
    return Bytes.toBytes("col" + j);
  }

  private static byte[] getValue(int batchId, int i, int j) {
    return Bytes.toBytes("value_for_" + Bytes.toString(getRowKey(batchId, i))
        + "_col" + j);
  }

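  /** Writes one batch of test data: NUM_ROWS_PER_BATCH rows of NUM_COLS_PER_ROW columns each. */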
  static void writeTestDataBatch(Configuration conf, String tableName,
      int batchId) throws Exception {
    LOG.debug("Writing test data batch " + batchId);
    HTable table = new HTable(conf, tableName);
    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
      Put put = new Put(getRowKey(batchId, i));
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        put.add(CF_BYTES, getQualifier(j),
            getValue(batchId, i, j));
      }
      // Submit the row once, after all columns have been added to the Put.
      table.put(put);
    }
    table.close();
  }

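  /** Reads back every row and column of the given batch and checks the expected values. */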
  static void verifyTestDataBatch(Configuration conf, String tableName,
      int batchId) throws Exception {
    LOG.debug("Verifying test data batch " + batchId);
    HTable table = new HTable(conf, tableName);
    for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
      Get get = new Get(getRowKey(batchId, i));
      Result result = table.get(get);
      for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
        KeyValue kv = result.getColumnLatest(CF_BYTES, getQualifier(j));
        assertEquals(Bytes.toStringBinary(getValue(batchId, i, j)),
            Bytes.toStringBinary(kv.getValue()));
      }
    }
    table.close();
  }

  private void writeSomeNewData() throws Exception {
    writeTestDataBatch(conf, tableName, numBatchesWritten);
    ++numBatchesWritten;
  }

  private void verifyAllData() throws Exception {
    for (int i = 0; i < numBatchesWritten; ++i) {
      verifyTestDataBatch(conf, tableName, i);
    }
  }

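  /** Disables the table, applies the new encoding settings to the column family, and re-enables it. */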
  private void setEncodingConf(DataBlockEncoding encoding,
      boolean encodeOnDisk) throws IOException {
    LOG.debug("Setting CF encoding to " + encoding + " (ordinal="
        + encoding.ordinal() + "), encodeOnDisk=" + encodeOnDisk);
    admin.disableTable(tableName);
    hcd.setDataBlockEncoding(encoding);
    hcd.setEncodeOnDisk(encodeOnDisk);
    admin.modifyColumn(tableName, hcd);
    admin.enableTable(tableName);
  }

  @Test(timeout=TIMEOUT_MS)
  public void testChangingEncoding() throws Exception {
    prepareTest("ChangingEncoding");
    for (boolean encodeOnDisk : new boolean[]{false, true}) {
      for (DataBlockEncoding encoding : ENCODINGS_TO_ITERATE) {
        setEncodingConf(encoding, encodeOnDisk);
        writeSomeNewData();
        verifyAllData();
      }
    }
  }

  @Test(timeout=TIMEOUT_MS)
  public void testChangingEncodingWithCompaction() throws Exception {
    prepareTest("ChangingEncodingWithCompaction");
    for (boolean encodeOnDisk : new boolean[]{false, true}) {
      for (DataBlockEncoding encoding : ENCODINGS_TO_ITERATE) {
        setEncodingConf(encoding, encodeOnDisk);
        writeSomeNewData();
        verifyAllData();
        compactAndWait();
        verifyAllData();
      }
    }
  }

  @Test(timeout=TIMEOUT_MS)
  public void testFlippingEncodeOnDisk() throws Exception {
    prepareTest("FlippingEncodeOnDisk");
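    // The focus here is flipping the encode-on-disk flag itself, so only the
    // NONE and FAST_DIFF encodings are exercised.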
    DataBlockEncoding[] encodings = new DataBlockEncoding[] {
        DataBlockEncoding.NONE, DataBlockEncoding.FAST_DIFF };
    for (DataBlockEncoding encoding : encodings) {
      boolean[] flagValues;
      if (encoding == DataBlockEncoding.NONE) {
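        // The encode-on-disk flag is irrelevant when no encoding is used.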
        flagValues =
            new boolean[] { HColumnDescriptor.DEFAULT_ENCODE_ON_DISK };
      } else {
        flagValues = new boolean[] { false, true, false, true };
      }
      for (boolean encodeOnDisk : flagValues) {
        setEncodingConf(encoding, encodeOnDisk);
        writeSomeNewData();
        verifyAllData();
        compactAndWait();
        verifyAllData();
      }
    }
  }

  private void compactAndWait() throws IOException, InterruptedException {
    LOG.debug("Compacting table " + tableName);
    admin.majorCompact(tableName);
    HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);

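    // Wait up to ~500 ms for the requested major compaction to show up in the
    // region server's compaction queue.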
    final long maxWaitTime = System.currentTimeMillis() + 500;
    boolean cont;
    do {
      cont = rs.compactSplitThread.getCompactionQueueSize() == 0;
      Threads.sleep(1);
    } while (cont && System.currentTimeMillis() < maxWaitTime);

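    // Then wait for the compaction queue to drain completely.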
    while (rs.compactSplitThread.getCompactionQueueSize() > 0) {
      Threads.sleep(5);
    }
    LOG.debug("Compaction queue size reached 0, continuing");
  }

  @Test(timeout=TIMEOUT_MS)
  public void testCrazyRandomChanges() throws Exception {
    prepareTest("RandomChanges");
    Random rand = new Random(2934298742974297L);
    for (int i = 0; i < 20; ++i) {
      int encodingOrdinal = rand.nextInt(DataBlockEncoding.values().length);
      DataBlockEncoding encoding = DataBlockEncoding.values()[encodingOrdinal];
      setEncodingConf(encoding, rand.nextBoolean());
      writeSomeNewData();
      verifyAllData();
    }
  }

}