package org.apache.hadoop.hbase.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.ToolRunner;
import org.junit.experimental.categories.Category;

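/** Tests the stand-alone merge tool that can merge arbitrary regions. */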
@Category(LargeTests.class)
public class TestMergeTool extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestMergeTool.class);
  HBaseTestingUtility TEST_UTIL;

  static final byte [] FAMILY = Bytes.toBytes("contents");
  static final byte [] QUALIFIER = Bytes.toBytes("dc");

  private final HRegionInfo[] sourceRegions = new HRegionInfo[5];
  private final HRegion[] regions = new HRegion[5];
  private HTableDescriptor desc;
  private byte [][][] rows;
  private MiniDFSCluster dfsCluster = null;

  @Override
  public void setUp() throws Exception {
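    // Shorten the ZooKeeper wait and point at an unlikely client port so the
    // test fails fast and cannot attach to a ZooKeeper ensemble left running
    // by another test.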
    this.conf.setLong("hbase.zookeeper.recoverable.waittime", 10);
    this.conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, 10001);
    this.conf.set("hbase.hstore.compactionThreshold", "2");

    this.desc = new HTableDescriptor(TableName.valueOf("TestMergeTool"));
    this.desc.addFamily(new HColumnDescriptor(FAMILY));

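    /*
     * Create the HRegionInfos for the five source regions.
     * Region 0 covers the key range [row_0200, row_0300).
     */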
    sourceRegions[0] = new HRegionInfo(this.desc.getTableName(),
        Bytes.toBytes("row_0200"),
        Bytes.toBytes("row_0300"));

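    // Region 1 covers [row_0250, row_0400) and overlaps region 0.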
    sourceRegions[1] =
      new HRegionInfo(this.desc.getTableName(),
          Bytes.toBytes("row_0250"),
          Bytes.toBytes("row_0400"));

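    // Region 2 covers [row_0100, row_0200) and abuts region 0 without overlapping it.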
    sourceRegions[2] =
      new HRegionInfo(this.desc.getTableName(),
          Bytes.toBytes("row_0100"),
          Bytes.toBytes("row_0200"));

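    // Region 3 covers [row_0500, row_0600) and is not adjacent to regions 0, 1 or 2.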
    sourceRegions[3] =
      new HRegionInfo(this.desc.getTableName(),
          Bytes.toBytes("row_0500"),
          Bytes.toBytes("row_0600"));

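    // Region 4 has empty start and end keys, so it spans the whole table and
    // overlaps every other region.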
    sourceRegions[4] =
      new HRegionInfo(this.desc.getTableName(),
          HConstants.EMPTY_BYTE_ARRAY,
          HConstants.EMPTY_BYTE_ARRAY);

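    // Rows to load into each region: the rows in rows[i] all fall inside the
    // key range of sourceRegions[i].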
    this.rows = new byte [5][][];
    this.rows[0] = Bytes.toByteArrays(new String[] { "row_0210", "row_0280" });
    this.rows[1] = Bytes.toByteArrays(new String[] { "row_0260", "row_0350",
        "row_035" });
    this.rows[2] = Bytes.toByteArrays(new String[] { "row_0110", "row_0175",
        "row_0175", "row_0175"});
    this.rows[3] = Bytes.toByteArrays(new String[] { "row_0525", "row_0560",
        "row_0560", "row_0560", "row_0560"});
    this.rows[4] = Bytes.toByteArrays(new String[] { "row_0050", "row_1000",
        "row_1000", "row_1000", "row_1000", "row_1000" });

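    // Start a mini DFS cluster and point the HBase root directory at it.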
    TEST_UTIL = new HBaseTestingUtility(conf);
    this.dfsCluster = TEST_UTIL.startMiniDFSCluster(2);
    this.fs = this.dfsCluster.getFileSystem();
    System.out.println("fs=" + this.fs);
    FSUtils.setFsDefault(this.conf, new Path(fs.getUri()));
    Path parentdir = fs.getHomeDirectory();
    FSUtils.setRootDir(conf, parentdir);
    fs.mkdirs(parentdir);
    FSUtils.setVersion(fs, parentdir);

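    // Note: super.setUp() must run after the mini DFS cluster is up, otherwise
    // the test directory would be created on the local file system instead.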
    super.setUp();
    try {
      createMetaRegion();
      new FSTableDescriptors(this.fs, this.testDir).createTableDescriptor(this.desc);

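      // Create each source region, load it with its rows, and register it in META.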
      for (int i = 0; i < sourceRegions.length; i++) {
        regions[i] =
          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf,
            this.desc);
        for (int j = 0; j < rows[i].length; j++) {
          byte [] row = rows[i][j];
          Put put = new Put(row);
          put.add(FAMILY, QUALIFIER, row);
          regions[i].put(put);
        }
        HRegion.addRegionToMETA(meta, regions[i]);
      }
      // Close the root and meta regions before running the merge tool below.
      closeRootAndMeta();
    } catch (Exception e) {
      TEST_UTIL.shutdownMiniCluster();
      throw e;
    }
  }

  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    for (int i = 0; i < sourceRegions.length; i++) {
      HRegion r = regions[i];
      if (r != null) {
        HRegion.closeHRegion(r);
      }
    }
    TEST_UTIL.shutdownMiniCluster();
  }

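  /**
   * Runs the Merge tool against the two named regions and verifies the result.
   * @param msg message logged before the merge and after verification
   * @param regionName1 name of the first region to merge
   * @param regionName2 name of the second region to merge
   * @param log WAL to use when opening the merged region
   * @param upperbound verify that the rows of rows[0] .. rows[upperbound - 1]
   *   are all present in the merged region
   * @return the merged region (already closed when returned)
   */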
  private HRegion mergeAndVerify(final String msg, final String regionName1,
    final String regionName2, final HLog log, final int upperbound)
  throws Exception {
    Merge merger = new Merge(this.conf);
    LOG.info(msg);
    LOG.info("fs2=" + this.conf.get("fs.defaultFS"));
    int errCode = ToolRunner.run(this.conf, merger,
      new String[] {this.desc.getTableName().getNameAsString(), regionName1, regionName2}
    );
    assertTrue("'" + msg + "' failed with errCode " + errCode, errCode == 0);
    HRegionInfo mergedInfo = merger.getMergedHRegionInfo();

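    // Open the merged region and confirm it contains every row that was loaded
    // into the source regions merged so far.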
    HRegion merged = HRegion.openHRegion(mergedInfo, this.desc, log, this.conf);
    verifyMerge(merged, upperbound);
    merged.close();
    LOG.info("Verified " + msg);
    return merged;
  }

  private void verifyMerge(final HRegion merged, final int upperbound)
  throws IOException {
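    // First make sure a scanner can run over the whole merged region without error.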
    Scan scan = new Scan();
    scan.addFamily(FAMILY);
    InternalScanner scanner = merged.getScanner(scan);
    try {
      List<KeyValue> testRes = null;
      while (true) {
        testRes = new ArrayList<KeyValue>();
        boolean hasNext = scanner.next(testRes);
        if (!hasNext) {
          break;
        }
      }
    } finally {
      scanner.close();
    }

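    // Every row loaded into regions 0 .. upperbound - 1 must be readable from
    // the merged region with its original value.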
    for (int i = 0; i < upperbound; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = merged.get(get);
        assertEquals(1, result.size());
        byte [] bytes = result.raw()[0].getValue();
        assertNotNull(Bytes.toStringBinary(rows[i][j]), bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
    }
  }

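  /**
   * Tests the merge tool by repeatedly merging the five source regions until a
   * single region covers the whole table.
   * @throws Exception
   */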
  public void testMergeTool() throws Exception {
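    // First verify that the rows can be read back from each source region, then
    // close the regions before handing them to the merge tool below.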
    for (int i = 0; i < regions.length; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = regions[i].get(get);
        byte [] bytes = result.raw()[0].getValue();
        assertNotNull(bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
      HRegion.closeHRegion(regions[i]);
    }

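    // Create a WAL that can be reused each time a merged region is opened.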
    Path logPath = new Path("/tmp");
    String logName = HConstants.HREGION_LOGDIR_NAME + "_"
      + System.currentTimeMillis();
    LOG.info("Creating log " + logPath.toString() + "/" + logName);
    HLog log = HLogFactory.createHLog(this.fs, logPath, logName, this.conf);

    try {
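      // Merge the overlapping regions 0 and 1.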
      HRegion merged = mergeAndVerify("merging regions 0 and 1 ",
        this.sourceRegions[0].getRegionNameAsString(),
        this.sourceRegions[1].getRegionNameAsString(), log, 2);

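      // Merge the result of merging regions 0 and 1 with the adjacent region 2.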
      merged = mergeAndVerify("merging regions 0+1 and 2",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[2].getRegionNameAsString(), log, 3);

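      // Merge in region 3, which is not adjacent to any of the regions merged so far.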
      merged = mergeAndVerify("merging regions 0+1+2 and 3",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[3].getRegionNameAsString(), log, 4);

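      // Finally merge in region 4, whose empty start and end keys span the entire table.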
      merged = mergeAndVerify("merging regions 0+1+2+3 and 4",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[4].getRegionNameAsString(), log, rows.length);
    } finally {
      log.closeAndDelete();
    }
  }
}