/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.ToolRunner;
import org.junit.experimental.categories.Category;
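
/**
 * Test the stand-alone merge tool that can merge arbitrary regions.
 * The tool is driven through ToolRunner with the arguments:
 * tableName region1 region2.
 */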
@Category(LargeTests.class)
public class TestMergeTool extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestMergeTool.class);
  HBaseTestingUtility TEST_UTIL;

  static final byte [] FAMILY = Bytes.toBytes("contents");
  static final byte [] QUALIFIER = Bytes.toBytes("dc");

  private final HRegionInfo[] sourceRegions = new HRegionInfo[5];
  private final HRegion[] regions = new HRegion[5];
  private HTableDescriptor desc;
  private byte [][][] rows;
  private MiniDFSCluster dfsCluster = null;

  @Override
  public void setUp() throws Exception {
    // Set the timeout down, else this test takes a long while to complete.
    this.conf.setLong("hbase.zookeeper.recoverable.waittime", 1000);
    // Point at a zk ensemble that is not there (else we might pick up a zk
    // ensemble put up by another concurrent test, which would mess up this
    // test). Choose an unlikely port.
    this.conf.setInt(HConstants.ZOOKEEPER_CLIENT_PORT, 10001);
    // Compact as soon as two store files accumulate.
    this.conf.set("hbase.hstore.compactionThreshold", "2");

    // Create the table description.
    this.desc = new HTableDescriptor("TestMergeTool");
    this.desc.addFamily(new HColumnDescriptor(FAMILY));

    /*
     * Create the HRegionInfos for the source regions.
     */
    // Region 0 will contain the key range [row_0200, row_0300).
    sourceRegions[0] = new HRegionInfo(this.desc.getName(),
        Bytes.toBytes("row_0200"),
        Bytes.toBytes("row_0300"));

    // Region 1 will contain the key range [row_0250, row_0400) and overlaps
    // region 0.
    sourceRegions[1] =
        new HRegionInfo(this.desc.getName(),
            Bytes.toBytes("row_0250"),
            Bytes.toBytes("row_0400"));

    // Region 2 will contain the key range [row_0100, row_0200) and is
    // adjacent to region 0 (and to the result of merging regions 0 and 1).
    sourceRegions[2] =
        new HRegionInfo(this.desc.getName(),
            Bytes.toBytes("row_0100"),
            Bytes.toBytes("row_0200"));

    // Region 3 will contain the key range [row_0500, row_0600) and is not
    // adjacent to any of regions 0, 1, 2 or their merged results.
    sourceRegions[3] =
        new HRegionInfo(this.desc.getName(),
            Bytes.toBytes("row_0500"),
            Bytes.toBytes("row_0600"));

    // Region 4 has empty start and end keys, so it overlaps all the others.
    sourceRegions[4] =
        new HRegionInfo(this.desc.getName(),
            HConstants.EMPTY_BYTE_ARRAY,
            HConstants.EMPTY_BYTE_ARRAY);

    /*
     * Now create the row keys: rows[i] holds the rows loaded into
     * sourceRegions[i].
     */
    this.rows = new byte [5][][];
    this.rows[0] = Bytes.toByteArrays(new String[] { "row_0210", "row_0280" });
    this.rows[1] = Bytes.toByteArrays(new String[] { "row_0260", "row_0350",
        "row_035" });
    this.rows[2] = Bytes.toByteArrays(new String[] { "row_0110", "row_0175",
        "row_0175", "row_0175"});
    this.rows[3] = Bytes.toByteArrays(new String[] { "row_0525", "row_0560",
        "row_0560", "row_0560", "row_0560"});
    this.rows[4] = Bytes.toByteArrays(new String[] { "row_0050", "row_1000",
        "row_1000", "row_1000", "row_1000", "row_1000" });

    // Start up dfs.
    TEST_UTIL = new HBaseTestingUtility(conf);
    this.dfsCluster = TEST_UTIL.startMiniDFSCluster(2);
    this.fs = this.dfsCluster.getFileSystem();
    System.out.println("fs=" + this.fs);
    this.conf.set("fs.defaultFS", fs.getUri().toString());
    Path parentdir = fs.getHomeDirectory();
    conf.set(HConstants.HBASE_DIR, parentdir.toString());
    fs.mkdirs(parentdir);
    FSUtils.setVersion(fs, parentdir);

    // Note: we must call super.setUp after starting the mini dfs cluster,
    // or we will end up with a local file system.
    super.setUp();
    try {
      // Create root and meta regions.
      createRootAndMetaRegions();
      FSTableDescriptors.createTableDescriptor(this.fs, this.testDir, this.desc);
      /*
       * Create the regions we will merge and load them with data.
       */
      for (int i = 0; i < sourceRegions.length; i++) {
        regions[i] =
            HRegion.createHRegion(this.sourceRegions[i], this.testDir,
                this.conf, this.desc);
        // Insert data: each row key doubles as the cell value.
        for (int j = 0; j < rows[i].length; j++) {
          byte [] row = rows[i][j];
          Put put = new Put(row);
          put.add(FAMILY, QUALIFIER, row);
          regions[i].put(put);
        }
        HRegion.addRegionToMETA(meta, regions[i]);
      }
      // Close root and meta regions.
      closeRootAndMeta();
    } catch (Exception e) {
      TEST_UTIL.shutdownMiniCluster();
      throw e;
    }
  }

  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    for (int i = 0; i < sourceRegions.length; i++) {
      HRegion r = regions[i];
      if (r != null) {
        r.close();
        r.getLog().closeAndDelete();
      }
    }
    TEST_UTIL.shutdownMiniCluster();
  }
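
  /**
   * Runs the merge tool on two regions and verifies the result.
   * @param msg message that describes this merge
   * @param regionName1 name of the first region to merge
   * @param regionName2 name of the second region to merge
   * @param log the log to use when opening the merged region
   * @param upperbound how far up in this.rows to verify
   * @return the merged region
   * @throws Exception
   */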
  private HRegion mergeAndVerify(final String msg, final String regionName1,
      final String regionName2, final HLog log, final int upperbound)
      throws Exception {
    Merge merger = new Merge(this.conf);
    LOG.info(msg);
    System.out.println("fs2=" + this.conf.get("fs.defaultFS"));
    int errCode = ToolRunner.run(this.conf, merger,
        new String[] {this.desc.getNameAsString(), regionName1, regionName2}
    );
    assertTrue("'" + msg + "' failed with errCode " + errCode, errCode == 0);
    HRegionInfo mergedInfo = merger.getMergedHRegionInfo();

    // Now verify that we can read all the rows from the new merged region.
    HRegion merged = HRegion.openHRegion(mergedInfo, this.desc, log, this.conf);
    verifyMerge(merged, upperbound);
    merged.close();
    LOG.info("Verified " + msg);
    return merged;
  }
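
  /**
   * Scans the merged region and then verifies, via Gets, that it contains
   * every row loaded into source regions 0 through upperbound - 1.
   */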
  private void verifyMerge(final HRegion merged, final int upperbound)
      throws IOException {
    // First run a scanner over the whole merged region.
    Scan scan = new Scan();
    scan.addFamily(FAMILY);
    InternalScanner scanner = merged.getScanner(scan);
    try {
      List<KeyValue> testRes = null;
      while (true) {
        testRes = new ArrayList<KeyValue>();
        boolean hasNext = scanner.next(testRes);
        if (!hasNext) {
          break;
        }
      }
    } finally {
      scanner.close();
    }

    // Now verify that each row loaded into the source regions below the
    // upper bound can be read back, and that its value is the row key.
    for (int i = 0; i < upperbound; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = merged.get(get, null);
        assertEquals(1, result.size());
        byte [] bytes = result.raw()[0].getValue();
        assertNotNull(Bytes.toStringBinary(rows[i][j]), bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
    }
  }
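
  /**
   * Test the merge tool by merging the five source regions, one at a time,
   * into a single region.
   * @throws Exception
   */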
  public void testMergeTool() throws Exception {
    // First verify we can read the rows from the source regions, then close
    // them.
    for (int i = 0; i < regions.length; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = regions[i].get(get, null);
        byte [] bytes = result.raw()[0].getValue();
        assertNotNull(bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
      // Close the region and delete the log.
      regions[i].close();
      regions[i].getLog().closeAndDelete();
    }

    // Create a log that we can reuse when we need to open regions.
    Path logPath = new Path("/tmp", HConstants.HREGION_LOGDIR_NAME + "_" +
        System.currentTimeMillis());
    LOG.info("Creating log " + logPath.toString());
    Path oldLogDir = new Path("/tmp", HConstants.HREGION_OLDLOGDIR_NAME);
    HLog log = new HLog(this.fs, logPath, oldLogDir, this.conf);
    try {
      // Merge regions 0 and 1, the overlapping regions.
      HRegion merged = mergeAndVerify("merging regions 0 and 1",
          this.sourceRegions[0].getRegionNameAsString(),
          this.sourceRegions[1].getRegionNameAsString(), log, 2);

      // Merge the result with region 2, which is adjacent.
      merged = mergeAndVerify("merging regions 0+1 and 2",
          merged.getRegionInfo().getRegionNameAsString(),
          this.sourceRegions[2].getRegionNameAsString(), log, 3);

      // Merge the result with region 3, which is not adjacent.
      merged = mergeAndVerify("merging regions 0+1+2 and 3",
          merged.getRegionInfo().getRegionNameAsString(),
          this.sourceRegions[3].getRegionNameAsString(), log, 4);

      // Merge the result with region 4, which overlaps everything.
      merged = mergeAndVerify("merging regions 0+1+2+3 and 4",
          merged.getRegionInfo().getRegionNameAsString(),
          this.sourceRegions[4].getRegionNameAsString(), log, rows.length);
    } finally {
      log.closeAndDelete();
    }
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
      new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}