/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hadoop.hbase.io.encoding;

import static org.apache.hadoop.hbase.io.encoding.TestChangingEncoding.CF;
import static org.apache.hadoop.hbase.io.encoding.TestChangingEncoding.CF_BYTES;

import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

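/**
 * Tests that data written as HFile v1 stays readable after the cluster is
 * restarted writing HFile v2 and the column family is switched to PREFIX
 * data block encoding, both before and after a manual compaction.
 */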
@Category(LargeTests.class)
public class TestUpgradeFromHFileV1ToEncoding {

  private static final Log LOG =
      LogFactory.getLog(TestUpgradeFromHFileV1ToEncoding.class);

  private static final String TABLE = "UpgradeTable";
  private static final byte[] TABLE_BYTES = Bytes.toBytes(TABLE);

  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final Configuration conf = TEST_UTIL.getConfiguration();

  private static final int NUM_HFILE_V1_BATCHES = 10;
  private static final int NUM_HFILE_V2_BATCHES = 20;
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
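    // Lower the memstore flush threshold to 1 MB so data reaches disk in
    // many small files, and select HFile v1 as the write format.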
    conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
    conf.setInt(HFile.FORMAT_VERSION_KEY, 1);
    TEST_UTIL.startMiniCluster();
    LOG.debug("Started an HFile v1 cluster");
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Test
  public void testUpgrade() throws Exception {
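    // Phase 1: create the table and write some batches while the cluster is
    // still writing HFile v1.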
    int numBatches = 0;
    HTableDescriptor htd = new HTableDescriptor(TABLE);
    HColumnDescriptor hcd = new HColumnDescriptor(CF);
    htd.addFamily(hcd);
    HBaseAdmin admin = new HBaseAdmin(conf);
    admin.createTable(htd);
    admin.close();
    for (int i = 0; i < NUM_HFILE_V1_BATCHES; ++i) {
      TestChangingEncoding.writeTestDataBatch(conf, TABLE, numBatches++);
    }
    TEST_UTIL.shutdownMiniHBaseCluster();

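    // Phase 2: restart the cluster on HFile v2 and switch the column family
    // to PREFIX data block encoding.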
    conf.setInt(HFile.FORMAT_VERSION_KEY, 2);
    TEST_UTIL.startMiniHBaseCluster(1, 1);
    LOG.debug("Started an HFile v2 cluster");
    admin = new HBaseAdmin(conf);
    htd = admin.getTableDescriptor(TABLE_BYTES);
    hcd = htd.getFamily(CF_BYTES);
    hcd.setDataBlockEncoding(DataBlockEncoding.PREFIX);
    admin.disableTable(TABLE);
    admin.modifyColumn(TABLE, hcd);
    admin.enableTable(TABLE);
    // Keep the admin open: it is still needed for the manual compaction below.
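    // These batches are written in the new format as encoded HFile v2.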
    for (int i = 0; i < NUM_HFILE_V2_BATCHES; ++i) {
      TestChangingEncoding.writeTestDataBatch(conf, TABLE, numBatches++);
    }

    LOG.debug("Verifying all 'batches', both HFile v1 and encoded HFile v2");
    verifyBatches(numBatches);

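    // Compaction rewrites the store files in the current format (encoded
    // HFile v2), so all batches must remain readable afterwards as well.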
    LOG.debug("Doing a manual compaction");
    admin.compact(TABLE);
    admin.close();
    // compact() only queues the request, so give the compaction time to run.
    Thread.sleep(TimeUnit.SECONDS.toMillis(10));

    LOG.debug("Verify all the data again");
    verifyBatches(numBatches);
  }

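  /** Reads back every batch written so far and verifies its contents. */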
  private void verifyBatches(int numBatches) throws Exception {
    for (int i = 0; i < numBatches; ++i) {
      TestChangingEncoding.verifyTestDataBatch(conf, TABLE, i);
    }
  }

}