package org.apache.hadoop.hbase.io.hfile;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;
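/**
 * Writes a small number of relatively large keys into an HFile configured with a tiny block
 * size and index chunk size, forcing the inline (leaf-level) index chunks to be converted
 * into a root-level index chunk when the writer is closed. The file is then reopened and
 * every written key is seeked to, verifying that the resulting block index is usable.
 */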
@Category(SmallTests.class)
public class TestHFileInlineToRootChunkConversion {
  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
  private final Configuration conf = testUtil.getConfiguration();

  @Test
  public void testWriteHFile() throws Exception {
    Path hfPath = new Path(testUtil.getDataTestDir(),
        TestHFileInlineToRootChunkConversion.class.getSimpleName() + ".hfile");
    // Use a very small index chunk size and block size so that the block index
    // grows quickly even with only a handful of keys.
    int maxChunkSize = 1024;
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
    HFileWriterV2 hfw =
        (HFileWriterV2) new HFileWriterV2.WriterFactoryV2(conf, cacheConf)
            .withBlockSize(16)
            .withPath(fs, hfPath).create();
    List<byte[]> keys = new ArrayList<byte[]>();
    StringBuilder sb = new StringBuilder();

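    // Build a few relatively large keys so that the inline index chunk fills up.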
    for (int i = 0; i < 4; ++i) {
      sb.append("key" + String.format("%05d", i));
      sb.append("_");
      for (int j = 0; j < 100; ++j) {
        // '0' + j is an int here, so this appends its decimal value (48..147),
        // padding each key with a few hundred characters.
        sb.append('0' + j);
      }
      String keyStr = sb.toString();
      sb.setLength(0);

      byte[] k = Bytes.toBytes(keyStr);
      System.out.println("Key: " + Bytes.toString(k));
      keys.add(k);
      byte[] v = Bytes.toBytes("value" + i);
      hfw.append(k, v);
    }
    hfw.close();

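    // Reopen the file and seek to every key that was written; this exercises the
    // root-level index chunk created when the writer was closed.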
    HFileReaderV2 reader = (HFileReaderV2) HFile.createReader(fs, hfPath, cacheConf);
    HFileScanner scanner = reader.getScanner(true, true);
    for (int i = 0; i < keys.size(); ++i) {
      scanner.seekTo(keys.get(i));
    }
    reader.close();
  }
}