/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.mapreduce.HLogInputFormat.HLogRecordReader;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

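/**
 * JUnit tests for the HLogRecordReader: verifies that HLogInputFormat
 * finds the expected splits and that the reader honors time ranges.
 */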
@Category(MediumTests.class)
public class TestHLogRecordReader {
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf;
  private static FileSystem fs;
  private static Path hbaseDir;
  private static final TableName tableName =
      TableName.valueOf(getName());
  private static final byte [] rowName = tableName.getName();
  private static final HRegionInfo info = new HRegionInfo(tableName,
      Bytes.toBytes(""), Bytes.toBytes(""), false);
  private static final byte [] family = Bytes.toBytes("column");
  private static final byte [] value = Bytes.toBytes("value");
  private static HTableDescriptor htd;
  private static Path logDir;
  private static String logName;

  private static String getName() {
    return "TestHLogRecordReader";
  }

  @Before
  public void setUp() throws Exception {
    // start each test with an empty root dir
    FileStatus[] entries = fs.listStatus(hbaseDir);
    for (FileStatus dir : entries) {
      fs.delete(dir.getPath(), true);
    }
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // use a small block size and a single replica for the mini DFS cluster
    conf = TEST_UTIL.getConfiguration();
    conf.setInt("dfs.blocksize", 1024 * 1024);
    conf.setInt("dfs.replication", 1);
    TEST_UTIL.startMiniDFSCluster(1);

    fs = TEST_UTIL.getDFSCluster().getFileSystem();

    hbaseDir = TEST_UTIL.createRootDir();

    logName = HConstants.HREGION_LOGDIR_NAME;
    logDir = new Path(hbaseDir, logName);

    htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor(family));
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

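  /**
   * Test partial reads from the log based on passed time range.
   */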
  @Test
  public void testPartialRead() throws Exception {
    HLog log = HLogFactory.createHLog(fs, hbaseDir, logName, conf);
    // write two edits to the first log, then roll to start a second one
    long ts = System.currentTimeMillis();
    WALEdit edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"), ts, value));
    log.append(info, tableName, edit, ts, htd);
    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("2"), ts+1, value));
    log.append(info, tableName, edit, ts+1, htd);
    log.rollWriter();

    Thread.sleep(1); // make sure the second log gets a later timestamp
    long ts1 = System.currentTimeMillis();

    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("3"), ts1+1, value));
    log.append(info, tableName, edit, ts1+1, htd);
    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("4"), ts1+2, value));
    log.append(info, tableName, edit, ts1+2, htd);
    log.close();

    HLogInputFormat input = new HLogInputFormat();
    Configuration jobConf = new Configuration(conf);
    jobConf.set("mapred.input.dir", logDir.toString());
    jobConf.setLong(HLogInputFormat.END_TIME_KEY, ts);

    // only the first file is considered, and only its first entry is in range
    List<InputSplit> splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    assertEquals(1, splits.size());
    testSplit(splits.get(0), Bytes.toBytes("1"));

    jobConf.setLong(HLogInputFormat.START_TIME_KEY, ts+1);
    jobConf.setLong(HLogInputFormat.END_TIME_KEY, ts1+1);
    splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    // both files need to be considered
    assertEquals(2, splits.size());
    // only the second entry of the first file is in range
    testSplit(splits.get(0), Bytes.toBytes("2"));
    // only the first entry of the second file is in range
    testSplit(splits.get(1), Bytes.toBytes("3"));
  }

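  /**
   * Test basic functionality of {@link HLogRecordReader}.
   */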
  @Test
  public void testHLogRecordReader() throws Exception {
    HLog log = HLogFactory.createHLog(fs, hbaseDir, logName, conf);
    byte [] value = Bytes.toBytes("value");
    WALEdit edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"),
        System.currentTimeMillis(), value));
    log.append(info, tableName, edit,
        System.currentTimeMillis(), htd);

    Thread.sleep(1); // make sure the second log gets a later timestamp
    long secondTs = System.currentTimeMillis();
    log.rollWriter();

    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("2"),
        System.currentTimeMillis(), value));
    log.append(info, tableName, edit,
        System.currentTimeMillis(), htd);
    log.close();
    long thirdTs = System.currentTimeMillis();

    // should have two log files now
    HLogInputFormat input = new HLogInputFormat();
    Configuration jobConf = new Configuration(conf);
    jobConf.set("mapred.input.dir", logDir.toString());

    // make sure both logs are found
    List<InputSplit> splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    assertEquals(2, splits.size());

    // should return exactly one KV
    testSplit(splits.get(0), Bytes.toBytes("1"));
    // same for the second split
    testSplit(splits.get(1), Bytes.toBytes("2"));

    // now test basic time ranges:

    // set an end time, so the second log file can be ignored
    jobConf.setLong(HLogInputFormat.END_TIME_KEY, secondTs-1);
    splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    assertEquals(1, splits.size());
    testSplit(splits.get(0), Bytes.toBytes("1"));

    // now set a start time
    jobConf.setLong(HLogInputFormat.END_TIME_KEY, Long.MAX_VALUE);
    jobConf.setLong(HLogInputFormat.START_TIME_KEY, thirdTs);
    splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    // both logs need to be considered
    assertEquals(2, splits.size());
    // but both readers skip all edits
    testSplit(splits.get(0));
    testSplit(splits.get(1));
  }

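  /**
   * Create a new reader from the split, and match the edits against the passed columns.
   */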
  private void testSplit(InputSplit split, byte[]... columns) throws Exception {
    HLogRecordReader reader = new HLogRecordReader();
    reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));

    for (byte[] column : columns) {
      assertTrue(reader.nextKeyValue());
      KeyValue kv = reader.getCurrentValue().getKeyValues().get(0);
      assertEquals("unexpected qualifier", Bytes.toString(column),
          Bytes.toString(kv.getQualifier()));
    }
    assertFalse(reader.nextKeyValue());
    reader.close();
  }

}