1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 package org.apache.hadoop.hbase.util;
22
23 import java.io.IOException;
24 import java.util.ArrayList;
25 import java.util.List;
26
27 import org.apache.commons.logging.Log;
28 import org.apache.commons.logging.LogFactory;
29 import org.apache.hadoop.fs.Path;
30 import org.apache.hadoop.hbase.HBaseTestCase;
31 import org.apache.hadoop.hbase.HColumnDescriptor;
32 import org.apache.hadoop.hbase.HConstants;
33 import org.apache.hadoop.hbase.HRegionInfo;
34 import org.apache.hadoop.hbase.HTableDescriptor;
35 import org.apache.hadoop.hbase.KeyValue;
36 import org.apache.hadoop.hbase.client.Get;
37 import org.apache.hadoop.hbase.client.Put;
38 import org.apache.hadoop.hbase.client.Result;
39 import org.apache.hadoop.hbase.client.Scan;
40 import org.apache.hadoop.hbase.regionserver.wal.HLog;
41 import org.apache.hadoop.hbase.regionserver.HRegion;
42 import org.apache.hadoop.hbase.regionserver.InternalScanner;
43 import org.apache.hadoop.hdfs.MiniDFSCluster;
44 import org.apache.hadoop.util.ToolRunner;
45
46
/**
 * Test of the {@code Merge} tool: builds five source regions with overlapping,
 * adjacent, disjoint, and whole-table key ranges on a mini HDFS cluster, then
 * merges them pairwise and verifies every originally-inserted row survives
 * each merge.
 */
public class TestMergeTool extends HBaseTestCase {
  static final Log LOG = LogFactory.getLog(TestMergeTool.class);

  // Single column family/qualifier used for every row written by this test.
  static final byte [] FAMILY = Bytes.toBytes("contents");
  static final byte [] QUALIFIER = Bytes.toBytes("dc");

  // Five source regions to be merged; regions[i] is the opened HRegion
  // backing sourceRegions[i].
  private final HRegionInfo[] sourceRegions = new HRegionInfo[5];
  private final HRegion[] regions = new HRegion[5];
  private HTableDescriptor desc;
  // rows[i] holds the row keys written into region i (duplicates overwrite).
  private byte [][][] rows;
  private MiniDFSCluster dfsCluster = null;

  /**
   * Spins up a 2-datanode mini DFS cluster, points HBase at it, creates
   * ROOT/META plus the five source regions, and loads each region with its
   * rows. Configuration keys are set BEFORE super.setUp() so the parent
   * harness picks them up.
   */
  @Override
  public void setUp() throws Exception {
    // Shorten the ZK recoverable-wait so failures surface quickly.
    this.conf.setLong("hbase.zookeeper.recoverable.waittime", 1000);
    // Lower the compaction threshold so compactions trigger during merges.
    this.conf.set("hbase.hstore.compactionThreshold", "2");

    // Table with the single test family.
    this.desc = new HTableDescriptor("TestMergeTool");
    this.desc.addFamily(new HColumnDescriptor(FAMILY));

    // Region 0: [row_0200, row_0300)
    sourceRegions[0] = new HRegionInfo(this.desc, Bytes.toBytes("row_0200"),
        Bytes.toBytes("row_0300"));

    // Region 1: [row_0250, row_0400) -- overlaps region 0.
    sourceRegions[1] =
      new HRegionInfo(this.desc, Bytes.toBytes("row_0250"),
          Bytes.toBytes("row_0400"));

    // Region 2: [row_0100, row_0200) -- adjacent to (abuts) region 0.
    sourceRegions[2] =
      new HRegionInfo(this.desc, Bytes.toBytes("row_0100"),
          Bytes.toBytes("row_0200"));

    // Region 3: [row_0500, row_0600) -- disjoint from all of the above.
    sourceRegions[3] =
      new HRegionInfo(this.desc, Bytes.toBytes("row_0500"),
          Bytes.toBytes("row_0600"));

    // Region 4: empty start and end keys, i.e. spans the entire key space.
    sourceRegions[4] =
      new HRegionInfo(this.desc, HConstants.EMPTY_BYTE_ARRAY,
          HConstants.EMPTY_BYTE_ARRAY);

    // Row keys to load per region; each key falls inside its region's range.
    // Repeated keys (e.g. three "row_0175") are overwrites of the same cell.
    this.rows = new byte [5][][];
    this.rows[0] = Bytes.toByteArrays(new String[] { "row_0210", "row_0280" });
    this.rows[1] = Bytes.toByteArrays(new String[] { "row_0260", "row_0350",
        "row_035" });
    this.rows[2] = Bytes.toByteArrays(new String[] { "row_0110", "row_0175",
        "row_0175", "row_0175"});
    this.rows[3] = Bytes.toByteArrays(new String[] { "row_0525", "row_0560",
        "row_0560", "row_0560", "row_0560"});
    this.rows[4] = Bytes.toByteArrays(new String[] { "row_0050", "row_1000",
        "row_1000", "row_1000", "row_1000", "row_1000" });

    // Start DFS and point both fs.defaultFS and hbase.rootdir at it so all
    // region files land on the mini cluster.
    this.dfsCluster = new MiniDFSCluster(conf, 2, true, (String[])null);
    this.fs = this.dfsCluster.getFileSystem();
    System.out.println("fs=" + this.fs);
    this.conf.set("fs.defaultFS", fs.getUri().toString());
    Path parentdir = fs.getHomeDirectory();
    conf.set(HConstants.HBASE_DIR, parentdir.toString());
    fs.mkdirs(parentdir);
    // Write the hbase.version file so the Merge tool accepts the root dir.
    FSUtils.setVersion(fs, parentdir);

    // Parent setUp must run after conf/DFS wiring above.
    super.setUp();
    try {
      // ROOT and META must exist before regions can be registered in META.
      createRootAndMetaRegions();

      for (int i = 0; i < sourceRegions.length; i++) {
        regions[i] =
          HRegion.createHRegion(this.sourceRegions[i], this.testDir, this.conf);

        // Load region i with its rows; value == row key for easy verification.
        for (int j = 0; j < rows[i].length; j++) {
          byte [] row = rows[i][j];
          Put put = new Put(row);
          put.add(FAMILY, QUALIFIER, row);
          regions[i].put(put);
        }
        HRegion.addRegionToMETA(meta, regions[i]);
      }

      closeRootAndMeta();

    } catch (Exception e) {
      // On any setup failure, tear the DFS cluster down before rethrowing so
      // we do not leak mini-cluster processes across tests.
      shutdownDfs(dfsCluster);
      throw e;
    }
  }

  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    shutdownDfs(dfsCluster);
  }

  /**
   * Runs the Merge tool on two regions of the test table and verifies the
   * result.
   *
   * @param msg descriptive message used in logs and assertion text
   * @param regionName1 name of the first region to merge
   * @param regionName2 name of the second region to merge
   * @param log WAL to use when reopening the merged region
   * @param upperbound verify rows from rows[0]..rows[upperbound-1] are present
   * @return the merged region. NOTE(review): the region is closed before being
   *   returned; callers only use its HRegionInfo, so reads against the
   *   returned instance would be invalid.
   * @throws Exception if the merge tool or verification fails
   */
  private HRegion mergeAndVerify(final String msg, final String regionName1,
    final String regionName2, final HLog log, final int upperbound)
  throws Exception {
    Merge merger = new Merge(this.conf);
    LOG.info(msg);
    System.out.println("fs2=" + this.conf.get("fs.defaultFS"));
    // Drive the tool through ToolRunner exactly as the command line would.
    int errCode = ToolRunner.run(this.conf, merger,
      new String[] {this.desc.getNameAsString(), regionName1, regionName2}
    );
    assertTrue("'" + msg + "' failed", errCode == 0);
    HRegionInfo mergedInfo = merger.getMergedHRegionInfo();

    // Reopen the merged region to check its contents on disk.
    HRegion merged = HRegion.openHRegion(mergedInfo, log, this.conf);
    verifyMerge(merged, upperbound);
    merged.close();
    LOG.info("Verified " + msg);
    return merged;
  }

  /**
   * Verifies the merged region: first drains a full scan (ensures the region
   * scans cleanly end to end), then checks via Get that every row loaded into
   * source regions 0..upperbound-1 is present with value == row key.
   *
   * @param merged the (open) merged region to verify
   * @param upperbound number of source-region row sets expected in the merge
   */
  private void verifyMerge(final HRegion merged, final int upperbound)
  throws IOException {
    // Full scan; results are discarded -- we only care that the scan
    // completes without error.
    Scan scan = new Scan();
    scan.addFamily(FAMILY);
    InternalScanner scanner = merged.getScanner(scan);
    try {
      List<KeyValue> testRes = null;
      while (true) {
        testRes = new ArrayList<KeyValue>();
        boolean hasNext = scanner.next(testRes);
        if (!hasNext) {
          break;
        }
      }
    } finally {
      scanner.close();
    }

    // Point lookups: each expected row must exist exactly once with its
    // row key stored as the cell value (as written in setUp).
    for (int i = 0; i < upperbound; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = merged.get(get, null);
        assertEquals(1, result.size());
        byte [] bytes = result.sorted()[0].getValue();
        assertNotNull(Bytes.toStringBinary(rows[i][j]), bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
    }
  }

  /**
   * End-to-end test: sanity-checks the loaded source regions, closes them,
   * then merges region pairs in sequence (0+1, then +2, +3, +4), verifying
   * all previously-loaded rows after every merge.
   */
  public void testMergeTool() throws Exception {
    // Pre-merge sanity check: every row is readable from its own region.
    for (int i = 0; i < regions.length; i++) {
      for (int j = 0; j < rows[i].length; j++) {
        Get get = new Get(rows[i][j]);
        get.addFamily(FAMILY);
        Result result = regions[i].get(get, null);
        byte [] bytes = result.sorted()[0].getValue();
        assertNotNull(bytes);
        assertTrue(Bytes.equals(bytes, rows[i][j]));
      }
      // Regions must be closed (and their WALs removed) before the external
      // Merge tool may operate on them.
      regions[i].close();
      regions[i].getLog().closeAndDelete();
    }

    // Fresh WAL (timestamped dir to avoid collisions) for reopening merged
    // regions during verification.
    Path logPath = new Path("/tmp", HConstants.HREGION_LOGDIR_NAME + "_" +
      System.currentTimeMillis());
    LOG.info("Creating log " + logPath.toString());
    Path oldLogDir = new Path("/tmp", HConstants.HREGION_OLDLOGDIR_NAME);
    HLog log = new HLog(this.fs, logPath, oldLogDir, this.conf);
    try {
      // Merge the overlapping pair first...
      HRegion merged = mergeAndVerify("merging regions 0 and 1",
        this.sourceRegions[0].getRegionNameAsString(),
        this.sourceRegions[1].getRegionNameAsString(), log, 2);

      // ...then fold in the adjacent region...
      merged = mergeAndVerify("merging regions 0+1 and 2",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[2].getRegionNameAsString(), log, 3);

      // ...then the disjoint region...
      merged = mergeAndVerify("merging regions 0+1+2 and 3",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[3].getRegionNameAsString(), log, 4);

      // ...and finally the whole-keyspace region, covering all rows.
      merged = mergeAndVerify("merging regions 0+1+2+3 and 4",
        merged.getRegionInfo().getRegionNameAsString(),
        this.sourceRegions[4].getRegionNameAsString(), log, rows.length);
    } finally {
      // Always release the WAL, even if a merge step fails.
      log.closeAndDelete();
    }
  }
}