1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.mapreduce;
20
21 import static org.junit.Assert.assertArrayEquals;
22 import static org.junit.Assert.assertEquals;
23 import static org.junit.Assert.assertTrue;
24 import static org.junit.Assert.fail;
25
26 import java.io.IOException;
27 import java.util.TreeMap;
28
29 import org.apache.hadoop.conf.Configuration;
30 import org.apache.hadoop.fs.FSDataOutputStream;
31 import org.apache.hadoop.fs.FileStatus;
32 import org.apache.hadoop.fs.FileSystem;
33 import org.apache.hadoop.fs.Path;
34 import org.apache.hadoop.hbase.HBaseTestingUtility;
35 import org.apache.hadoop.hbase.HColumnDescriptor;
36 import org.apache.hadoop.hbase.HConstants;
37 import org.apache.hadoop.hbase.HTableDescriptor;
38 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
39 import org.apache.hadoop.hbase.testclassification.LargeTests;
40 import org.apache.hadoop.hbase.TableNotFoundException;
41 import org.apache.hadoop.hbase.NamespaceDescriptor;
42 import org.apache.hadoop.hbase.TableName;
43 import org.apache.hadoop.hbase.client.HTable;
44 import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
45 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
46 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
47 import org.apache.hadoop.hbase.io.hfile.HFile;
48 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
49 import org.apache.hadoop.hbase.regionserver.BloomType;
50 import org.apache.hadoop.hbase.util.Bytes;
51 import org.apache.hadoop.hbase.util.HFileTestUtil;
52 import org.junit.AfterClass;
53 import org.junit.BeforeClass;
54 import org.junit.Rule;
55 import org.junit.Test;
56 import org.junit.experimental.categories.Category;
57 import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
58 import org.junit.rules.TestName;
59
60
61
62
63
64
/**
 * Integration tests for {@link LoadIncrementalHFiles}: bulk loading of HFiles
 * into pre-split and loader-created tables, HFile splitting across region
 * boundaries, and error handling for malformed input directories.
 */
@Category(LargeTests.class)
public class TestLoadIncrementalHFiles {
  // Current test method name; used to derive per-test paths and table names.
  @Rule
  public TestName tn = new TestName();

  // Column qualifier and family written into every generated HFile.
  private static final byte[] QUALIFIER = Bytes.toBytes("myqual");
  private static final byte[] FAMILY = Bytes.toBytes("myfam");
  // Non-default namespace each load scenario is replayed under.
  private static final String NAMESPACE = "bulkNS";

  // Substring expected in the error message when the HFiles' family
  // directories do not match any column family of the target table.
  static final String EXPECTED_MSG_FOR_NON_EXISTING_FAMILY = "Unmatched family names found";
  // Per-region/per-family HFile cap configured on the mini cluster.
  static final int MAX_FILES_PER_REGION_PER_FAMILY = 4;

  // Split keys used whenever a test pre-creates its table.
  private static final byte[][] SPLIT_KEYS = new byte[][] {
    Bytes.toBytes("ddd"),
    Bytes.toBytes("ppp")
  };

  // Shared mini-cluster harness; started once in setUpBeforeClass().
  static HBaseTestingUtility util = new HBaseTestingUtility();
83
/**
 * Boots one mini cluster for the whole class, configured with: no region
 * coprocessors, a small per-region/per-family HFile cap (so the "too many
 * files" test can trigger it cheaply), a tag-aware RPC codec, and HFile
 * format v3. Also creates the namespace used by the dual-namespace runs.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  // Clear any region coprocessors configured elsewhere.
  util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,"");
  util.getConfiguration().setInt(
    LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY,
    MAX_FILES_PER_REGION_PER_FAMILY);

  // Cell tags only travel over RPC when a tag-aware codec is configured.
  util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
    KeyValueCodecWithTags.class.getCanonicalName());

  // HFile v3 is the first format version that persists cell tags.
  util.getConfiguration().setInt("hfile.format.version", 3);
  util.startMiniCluster();

  setupNamespace();
}
99
/** Creates the non-default namespace ({@code bulkNS}) that scenarios are replayed under. */
protected static void setupNamespace() throws Exception {
  util.getHBaseAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build());
}
103
/** Shuts down the shared mini cluster started in {@link #setUpBeforeClass()}. */
@AfterClass
public static void tearDownAfterClass() throws Exception {
  util.shutdownMiniCluster();
}
108
109
110
111
112
113 @Test
114 public void testSimpleLoad() throws Exception {
115 runTest("testSimpleLoad", BloomType.NONE,
116 new byte[][][] {
117 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
118 new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
119 });
120 }
121
122
123
124
125
126 @Test
127 public void testRegionCrossingLoad() throws Exception {
128 runTest("testRegionCrossingLoad", BloomType.NONE,
129 new byte[][][] {
130 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
131 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
132 });
133 }
134
135
136
137
138 @Test
139 public void testRegionCrossingRowBloom() throws Exception {
140 runTest("testRegionCrossingLoadRowBloom", BloomType.ROW,
141 new byte[][][] {
142 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
143 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
144 });
145 }
146
147
148
149
150 @Test
151 public void testRegionCrossingRowColBloom() throws Exception {
152 runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL,
153 new byte[][][] {
154 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
155 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
156 });
157 }
158
159
160
161
162
163 @Test
164 public void testSimpleHFileSplit() throws Exception {
165 runTest("testHFileSplit", BloomType.NONE,
166 new byte[][] {
167 Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
168 Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
169 },
170 new byte[][][] {
171 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("lll") },
172 new byte[][]{ Bytes.toBytes("mmm"), Bytes.toBytes("zzz") },
173 }
174 );
175 }
176
177
178
179
180
/** Region-crossing HFile-split scenario with no bloom filter. */
@Test
public void testRegionCrossingHFileSplit() throws Exception {
  testRegionCrossingHFileSplit(BloomType.NONE);
}

/** Region-crossing HFile-split scenario with a ROW bloom filter. */
@Test
public void testRegionCrossingHFileSplitRowBloom() throws Exception {
  testRegionCrossingHFileSplit(BloomType.ROW);
}

/** Region-crossing HFile-split scenario with a ROWCOL bloom filter. */
@Test
public void testRegionCrossingHFileSplitRowColBloom() throws Exception {
  testRegionCrossingHFileSplit(BloomType.ROWCOL);
}
203
204 private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception {
205 runTest("testHFileSplit" + bloomType + "Bloom", bloomType,
206 new byte[][] {
207 Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
208 Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
209 },
210 new byte[][][] {
211 new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
212 new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
213 }
214 );
215 }
216
217 private HTableDescriptor buildHTD(TableName tableName, BloomType bloomType) {
218 HTableDescriptor htd = new HTableDescriptor(tableName);
219 HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
220 familyDesc.setBloomFilterType(bloomType);
221 htd.addFamily(familyDesc);
222 return htd;
223 }
224
/**
 * Convenience overload: runs the scenario without pre-split keys, so the
 * loader creates the table itself.
 */
private void runTest(String testName, BloomType bloomType,
  byte[][][] hfileRanges) throws Exception {
  runTest(testName, bloomType, null, hfileRanges);
}
229
230 private void runTest(String testName, BloomType bloomType,
231 byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception {
232 final byte[] TABLE_NAME = Bytes.toBytes("mytable_"+testName);
233 final boolean preCreateTable = tableSplitKeys != null;
234
235
236 final TableName TABLE_WITHOUT_NS = TableName.valueOf(TABLE_NAME);
237 runTest(testName, TABLE_WITHOUT_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges);
238
239
240 final TableName TABLE_WITH_NS = TableName.valueOf(Bytes.toBytes(NAMESPACE), TABLE_NAME);
241 runTest(testName, TABLE_WITH_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges);
242 }
243
/** Builds a single-family descriptor for {@code tableName} and delegates to the main driver. */
private void runTest(String testName, TableName tableName, BloomType bloomType,
  boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception {
  HTableDescriptor htd = buildHTD(tableName, bloomType);
  runTest(testName, htd, bloomType, preCreateTable, tableSplitKeys, hfileRanges);
}
249
250 private void runTest(String testName, HTableDescriptor htd, BloomType bloomType,
251 boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception {
252 Path dir = util.getDataTestDirOnTestFS(testName);
253 FileSystem fs = util.getTestFileSystem();
254 dir = dir.makeQualified(fs);
255 Path familyDir = new Path(dir, Bytes.toString(FAMILY));
256
257 int hfileIdx = 0;
258 for (byte[][] range : hfileRanges) {
259 byte[] from = range[0];
260 byte[] to = range[1];
261 HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
262 + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000);
263 }
264 int expectedRows = hfileIdx * 1000;
265
266 if (preCreateTable) {
267 util.getHBaseAdmin().createTable(htd, tableSplitKeys);
268 }
269
270 final TableName tableName = htd.getTableName();
271 LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
272 String [] args= {dir.toString(), tableName.toString()};
273 loader.run(args);
274
275 HTable table = new HTable(util.getConfiguration(), tableName);
276 try {
277 assertEquals(expectedRows, util.countRows(table));
278 } finally {
279 table.close();
280 }
281
282
283 Path stagingBasePath = SecureBulkLoadUtil.getBaseStagingDir(util.getConfiguration());
284 if(fs.exists(stagingBasePath)) {
285 FileStatus[] files = fs.listStatus(stagingBasePath);
286 for(FileStatus file : files) {
287 assertTrue("Folder=" + file.getPath() + " is not cleaned up.",
288 file.getPath().getName() != "DONOTERASE");
289 }
290 }
291
292 util.deleteTable(tableName);
293 }
294
295
296
297
298
299
300
/**
 * Bulk loads a single tagged HFile that spans several pre-split regions and
 * verifies both the row count and that cell tags survived the load and split.
 *
 * NOTE(review): the method name starts with 'h' rather than "test". Under
 * JUnit 3 naming rules that prefix disabled a test, but with the JUnit 4
 * {@code @Test} annotation the method still runs — confirm whether it was
 * meant to be disabled before renaming it.
 */
@Test(timeout = 60000)
public void htestTagsSurviveBulkLoadSplit() throws Exception {
  Path dir = util.getDataTestDirOnTestFS(tn.getMethodName());
  FileSystem fs = util.getTestFileSystem();
  dir = dir.makeQualified(fs);
  Path familyDir = new Path(dir, Bytes.toString(FAMILY));

  // Pre-split the table so the single HFile below must be split on load.
  byte [][] tableSplitKeys = new byte[][] {
    Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
    Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
  };

  // One tagged HFile covering ["ddd", "ooo"] — crosses region boundaries.
  byte[] from = Bytes.toBytes("ddd");
  byte[] to = Bytes.toBytes("ooo");
  HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs,
    new Path(familyDir, tn.getMethodName()+"_hfile"),
    FAMILY, QUALIFIER, from, to, 1000);
  int expectedRows = 1000;

  TableName tableName = TableName.valueOf(tn.getMethodName());
  HTableDescriptor htd = buildHTD(tableName, BloomType.NONE);
  util.getHBaseAdmin().createTable(htd, tableSplitKeys);

  LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
  String [] args= {dir.toString(), tableName.toString()};
  loader.run(args);

  HTable table = new HTable(util.getConfiguration(), tableName);
  try {
    assertEquals(expectedRows, util.countRows(table));
    // Tags must survive the split-and-load round trip (needs HFile v3 +
    // the tag-aware codec configured in setUpBeforeClass()).
    HFileTestUtil.verifyTags(table);
  } finally {
    table.close();
  }

  util.deleteTable(tableName);
}
339
340
341
342
343 @Test
344 public void testNonexistentColumnFamilyLoad() throws Exception {
345 String testName = "testNonexistentColumnFamilyLoad";
346 byte[][][] hFileRanges = new byte[][][] {
347 new byte[][]{ Bytes.toBytes("aaa"), Bytes.toBytes("ccc") },
348 new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
349 };
350
351 final byte[] TABLE = Bytes.toBytes("mytable_"+testName);
352 HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));
353
354
355 HColumnDescriptor family =
356 new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase()));
357 htd.addFamily(family);
358
359 try {
360 runTest(testName, htd, BloomType.NONE, true, SPLIT_KEYS, hFileRanges);
361 assertTrue("Loading into table with non-existent family should have failed", false);
362 } catch (Exception e) {
363 assertTrue("IOException expected", e instanceof IOException);
364
365 String errMsg = e.getMessage();
366 assertTrue("Incorrect exception message, expected message: ["
367 + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY + "], current message: [" + errMsg + "]",
368 errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY));
369 }
370 }
371
/** Junk files/dirs alongside real HFiles must be skipped; table pre-created. */
@Test(timeout = 60000)
public void testNonHfileFolderWithUnmatchedFamilyName() throws Exception {
  testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true);
}

/** Same scenario, but the loader must create the table itself. */
@Test(timeout = 60000)
public void testNonHfileFolder() throws Exception {
  testNonHfileFolder("testNonHfileFolder", false);
}
381
382
383
384
385
386
387 private void testNonHfileFolder(String tableName, boolean preCreateTable) throws Exception {
388 Path dir = util.getDataTestDirOnTestFS(tableName);
389 FileSystem fs = util.getTestFileSystem();
390 dir = dir.makeQualified(fs);
391
392 Path familyDir = new Path(dir, Bytes.toString(FAMILY));
393 HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_0"),
394 FAMILY, QUALIFIER, Bytes.toBytes("begin"), Bytes.toBytes("end"), 500);
395 createRandomDataFile(fs, new Path(familyDir, "012356789"), 16 * 1024);
396
397 final String NON_FAMILY_FOLDER = "_logs";
398 Path nonFamilyDir = new Path(dir, NON_FAMILY_FOLDER);
399 fs.mkdirs(nonFamilyDir);
400 fs.mkdirs(new Path(nonFamilyDir, "non-file"));
401 createRandomDataFile(fs, new Path(nonFamilyDir, "012356789"), 16 * 1024);
402
403 HTable table = null;
404 try {
405 if (preCreateTable) {
406 table = util.createTable(TableName.valueOf(tableName), FAMILY);
407 } else {
408 table = new HTable(util.getConfiguration(), TableName.valueOf(tableName));
409 }
410
411 final String[] args = {dir.toString(), tableName};
412 new LoadIncrementalHFiles(util.getConfiguration()).run(args);
413 assertEquals(500, util.countRows(table));
414 } finally {
415 if (table != null) {
416 table.close();
417 }
418 fs.delete(dir, true);
419 }
420 }
421
422 private static void createRandomDataFile(FileSystem fs, Path path, int size)
423 throws IOException {
424 FSDataOutputStream stream = fs.create(path);
425 try {
426 byte[] data = new byte[1024];
427 for (int i = 0; i < data.length; ++i) {
428 data[i] = (byte)(i & 0xff);
429 }
430 while (size >= data.length) {
431 stream.write(data, 0, data.length);
432 size -= data.length;
433 }
434 if (size > 0) {
435 stream.write(data, 0, size);
436 }
437 } finally {
438 stream.close();
439 }
440 }
441
442 @Test
443 public void testSplitStoreFile() throws IOException {
444 Path dir = util.getDataTestDirOnTestFS("testSplitHFile");
445 FileSystem fs = util.getTestFileSystem();
446 Path testIn = new Path(dir, "testhfile");
447 HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
448 HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER,
449 Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
450
451 Path bottomOut = new Path(dir, "bottom.out");
452 Path topOut = new Path(dir, "top.out");
453
454 LoadIncrementalHFiles.splitStoreFile(
455 util.getConfiguration(), testIn,
456 familyDesc, Bytes.toBytes("ggg"),
457 bottomOut,
458 topOut);
459
460 int rowCount = verifyHFile(bottomOut);
461 rowCount += verifyHFile(topOut);
462 assertEquals(1000, rowCount);
463 }
464
/** Split an unencoded HFile into an unencoded family. */
@Test
public void testSplitStoreFileWithNoneToNone() throws IOException {
  testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
}

/** Split a DIFF-encoded HFile into a DIFF-encoded family. */
@Test
public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
  testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
}

/** Split a DIFF-encoded HFile into an unencoded family. */
@Test
public void testSplitStoreFileWithEncodedToNone() throws IOException {
  testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
}

/** Split an unencoded HFile into a DIFF-encoded family. */
@Test
public void testSplitStoreFileWithNoneToEncoded() throws IOException {
  testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
}
484
485 private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
486 DataBlockEncoding cfEncoding) throws IOException {
487 Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWith"
488 + bulkloadEncoding + "To" + cfEncoding);
489 FileSystem fs = util.getTestFileSystem();
490 Path testIn = new Path(dir, "testhfile");
491 HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
492 familyDesc.setDataBlockEncoding(cfEncoding);
493 HFileTestUtil.createHFileWithDataBlockEncoding(
494 util.getConfiguration(), fs, testIn, bulkloadEncoding,
495 FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
496
497 Path bottomOut = new Path(dir, "bottom.out");
498 Path topOut = new Path(dir, "top.out");
499
500 LoadIncrementalHFiles.splitStoreFile(
501 util.getConfiguration(), testIn,
502 familyDesc, Bytes.toBytes("ggg"),
503 bottomOut,
504 topOut);
505
506 int rowCount = verifyHFile(bottomOut);
507 rowCount += verifyHFile(topOut);
508 assertEquals(1000, rowCount);
509 }
510
511 private int verifyHFile(Path p) throws IOException {
512 Configuration conf = util.getConfiguration();
513 HFile.Reader reader = HFile.createReader(
514 p.getFileSystem(conf), p, new CacheConfig(conf), conf);
515 reader.loadFileInfo();
516 HFileScanner scanner = reader.getScanner(false, false);
517 scanner.seekTo();
518 int count = 0;
519 do {
520 count++;
521 } while (scanner.next());
522 assertTrue(count > 0);
523 reader.close();
524 return count;
525 }
526
527 private void addStartEndKeysForTest(TreeMap<byte[], Integer> map, byte[] first, byte[] last) {
528 Integer value = map.containsKey(first)?map.get(first):0;
529 map.put(first, value+1);
530
531 value = map.containsKey(last)?map.get(last):0;
532 map.put(last, value-1);
533 }
534
535 @Test
536 public void testInferBoundaries() {
537 TreeMap<byte[], Integer> map = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
538
539
540
541
542
543
544
545
546
547
548
549 String first;
550 String last;
551
552 first = "a"; last = "e";
553 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
554
555 first = "r"; last = "s";
556 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
557
558 first = "o"; last = "p";
559 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
560
561 first = "g"; last = "k";
562 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
563
564 first = "v"; last = "x";
565 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
566
567 first = "c"; last = "i";
568 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
569
570 first = "m"; last = "q";
571 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
572
573 first = "s"; last = "t";
574 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
575
576 first = "u"; last = "w";
577 addStartEndKeysForTest(map, first.getBytes(), last.getBytes());
578
579 byte[][] keysArray = LoadIncrementalHFiles.inferBoundaries(map);
580 byte[][] compare = new byte[3][];
581 compare[0] = "m".getBytes();
582 compare[1] = "r".getBytes();
583 compare[2] = "u".getBytes();
584
585 assertEquals(keysArray.length, 3);
586
587 for (int row = 0; row<keysArray.length; row++){
588 assertArrayEquals(keysArray[row], compare[row]);
589 }
590 }
591
592 @Test
593 public void testLoadTooMayHFiles() throws Exception {
594 Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles");
595 FileSystem fs = util.getTestFileSystem();
596 dir = dir.makeQualified(fs);
597 Path familyDir = new Path(dir, Bytes.toString(FAMILY));
598
599 byte[] from = Bytes.toBytes("begin");
600 byte[] to = Bytes.toBytes("end");
601 for (int i = 0; i <= MAX_FILES_PER_REGION_PER_FAMILY; i++) {
602 HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
603 + i), FAMILY, QUALIFIER, from, to, 1000);
604 }
605
606 LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
607 String [] args= {dir.toString(), "mytable_testLoadTooMayHFiles"};
608 try {
609 loader.run(args);
610 fail("Bulk loading too many files should fail");
611 } catch (IOException ie) {
612 assertTrue(ie.getMessage().contains("Trying to load more than "
613 + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles"));
614 }
615 }
616
617 @Test(expected = TableNotFoundException.class)
618 public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
619 Configuration conf = util.getConfiguration();
620 conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
621 LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
622 String[] args = { "directory", "nonExistingTable" };
623 loader.run(args);
624 }
625
626 @Test
627 public void testTableWithCFNameStartWithUnderScore() throws Exception {
628 Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore");
629 FileSystem fs = util.getTestFileSystem();
630 dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
631 String family = "_cf";
632 Path familyDir = new Path(dir, family);
633
634 byte[] from = Bytes.toBytes("begin");
635 byte[] to = Bytes.toBytes("end");
636 Configuration conf = util.getConfiguration();
637 String tableName = "mytable_cfNameStartWithUnderScore";
638 HTable table = util.createTable(tableName, family);
639 HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family),
640 QUALIFIER, from, to, 1000);
641
642 LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
643 String[] args = { dir.toString(), tableName };
644 try {
645 loader.run(args);
646 assertEquals(1000, util.countRows(table));
647 } finally {
648 if (null != table) {
649 table.close();
650 }
651 }
652 }
653 }
654