/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

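/**
 * Tests for {@link ImportTsv}: each case runs the tool as a MapReduce job
 * against a mini-cluster, covering direct-to-table imports, bulk (HFile)
 * output, custom mappers, and argument validation. A typical invocation
 * under test looks like:
 *
 * <pre>
 * hbase org.apache.hadoop.hbase.mapreduce.ImportTsv
 *   -Dimporttsv.columns=HBASE_ROW_KEY,FAM:A,FAM:B &lt;tablename&gt; &lt;inputdir&gt;
 * </pre>
 */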
@Category(LargeTests.class)
public class TestImportTsv implements Configurable {

  protected static final Log LOG = LogFactory.getLog(TestImportTsv.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();

  /**
   * Delete the temporary directory after running doMROnTableTest. Boolean.
   * Default is true.
   */
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";

  /**
   * Force use of the combiner in doMROnTableTest. Boolean. Default is true.
   */
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

  private static final String FAMILY = "FAM";

  @Override
  public Configuration getConf() {
    return util.getConfiguration();
  }

  @Override
  public void setConf(Configuration conf) {
    throw new UnsupportedOperationException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    util.startMiniCluster();
    util.startMiniMapReduceCluster();
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }

  @Test
  public void testMROnTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 1);
    util.deleteTable(table);
  }

  @Test
  public void testMROnTableWithTimestamp() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,HBASE_TS_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        table
    };
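    // Input line layout mirrors the COLUMNS spec above: row key, cell
    // timestamp, then one value per mapped column.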
    String data = "KEY,1234,VALUE1,VALUE2\n";

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(table);
  }

  @Test
  public void testMROnTableWithCustomMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapper",
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testBulkOutputWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testBulkOutputWithAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String INPUT_FILE = "InputFile1.csv";

    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table,
        INPUT_FILE
    };
    GenericOptionsParser opts = new GenericOptionsParser(util.getConfiguration(), args);
    args = opts.getRemainingArgs();
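    // With TsvImporterTextMapper and a bulk-output path configured,
    // createSubmittableJob is expected to pair the mapper with
    // TextSortReducer and emit Text map output values.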
    Job job = ImportTsv.createSubmittableJob(util.getConfiguration(), args);
    assertEquals(TsvImporterTextMapper.class, job.getMapperClass());
    assertEquals(TextSortReducer.class, job.getReducerClass());
    assertEquals(Text.class, job.getMapOutputValueClass());
  }

  @Test
  public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");

    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table
    };
    String data = "KEY\u001bVALUE4\u001bVALUE8\n";
    doMROnTableTest(util, FAMILY, data, args, 4);
  }

  @Test(expected = TableNotFoundException.class)
  public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String[] args = new String[] { table, "/inputFile" };

    Configuration conf = new Configuration(util.getConfiguration());
    conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A");
    conf.set(ImportTsv.BULK_OUTPUT_CONF_KEY, "/output");
    conf.set(ImportTsv.CREATE_TABLE_CONF_KEY, "no");
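    // The table does not exist and table creation is disabled, so job setup
    // should fail fast with TableNotFoundException instead of creating it.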
    ImportTsv.createSubmittableJob(conf, args);
  }

  @Test(expected = TableNotFoundException.class)
  public void testMRWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String[] args = new String[] { table, "/inputFile" };

    Configuration conf = new Configuration(util.getConfiguration());
    ImportTsv.createSubmittableJob(conf, args);
  }

  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args) throws Exception {
    return doMROnTableTest(util, family, data, args, 1);
  }

  /**
   * Run an ImportTsv job and perform basic validation on the results. Returns
   * the Tool instance used so that other tests can inspect it for further
   * validation as necessary. This method is static so that it does not rely
   * on the instance's util/conf facilities.
   * @param args Any arguments to pass BEFORE the input file path is appended.
   * @param valueMultiplier the expected value multiplier, checked against the
   *          loaded rows by {@link #validateTable}.
   * @return The Tool instance used to run the test.
   */
  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args, int valueMultiplier) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

    // Populate the input file.
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    if (data == null) {
      data = "KEY\u001bVALUE1\u001bVALUE2\n";
    }
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

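    // A combiner normally runs only once a mapper has produced several spill
    // files; dropping min.num.spills.for.combine to 1 makes even this tiny
    // single-spill job exercise the combiner path.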
    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
      conf.setInt("min.num.spills.for.combine", 1);
    }

    // Run the import.
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(new String[argv.size()])));

    // Perform basic validation. If the input args did not include
    // ImportTsv.BULK_OUTPUT_CONF_KEY, validate the data loaded into the
    // table; otherwise, validate the presence of HFiles.
    boolean createdHFiles = false;
    String outputPath = null;
    for (String arg : argv) {
      if (arg.contains(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
        createdHFiles = true;
        // Split '-Dfoo=bar' on '=' and keep 'bar'.
        outputPath = arg.split("=")[1];
        break;
      }
    }

    if (createdHFiles) {
      validateHFiles(fs, outputPath, family);
    } else {
      validateTable(conf, table, family, valueMultiplier);
    }

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

  /**
   * Confirm the ImportTsv results by scanning the data in the online table.
   */
  private static void validateTable(Configuration conf, String tableName,
      String family, int valueMultiplier) throws IOException {

    LOG.debug("Validating table.");
    HTable table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
      try {
        // Scan the entire family.
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes(family));
        ResultScanner resScanner = table.getScanner(scan);
        for (Result res : resScanner) {
          assertTrue(res.size() == 2);
          List<Cell> kvs = res.listCells();
          assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
          assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a bit and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // continue
      }
    }
    table.close();
    assertTrue(verified);
  }

  /**
   * Confirm the ImportTsv results by checking the HFiles written to the
   * filesystem.
   */
  private static void validateHFiles(FileSystem fs, String outputPath, String family)
      throws IOException {

    // Verify that a column family directory exists for each expected family
    // and that every HFile under it contains data.
    LOG.debug("Validating HFiles.");
    Set<String> configFamilies = new HashSet<String>();
    configFamilies.add(family);
    Set<String> foundFamilies = new HashSet<String>();
    for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
      String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
      String cf = elements[elements.length - 1];
      foundFamilies.add(cf);
      assertTrue(
          String.format(
              "HFile output contains a column family (%s) not present in input families (%s)",
              cf, configFamilies),
          configFamilies.contains(cf));
      for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
        assertTrue(
            String.format("HFile %s appears to contain no data.", hfile.getPath()),
            hfile.getLen() > 0);
      }
    }
  }
}