/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.lang.ref.SoftReference;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListSet;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.apache.hadoop.util.Progressable;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

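/**
 * Test class for the HStore.
 */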
@Category(MediumTests.class)
public class TestStore extends TestCase {
  public static final Log LOG = LogFactory.getLog(TestStore.class);

  HStore store;
  byte [] table = Bytes.toBytes("table");
  byte [] family = Bytes.toBytes("family");

  byte [] row = Bytes.toBytes("row");
  byte [] row2 = Bytes.toBytes("row2");
  byte [] qf1 = Bytes.toBytes("qf1");
  byte [] qf2 = Bytes.toBytes("qf2");
  byte [] qf3 = Bytes.toBytes("qf3");
  byte [] qf4 = Bytes.toBytes("qf4");
  byte [] qf5 = Bytes.toBytes("qf5");
  byte [] qf6 = Bytes.toBytes("qf6");

  NavigableSet<byte[]> qualifiers =
    new ConcurrentSkipListSet<byte[]>(Bytes.BYTES_COMPARATOR);

  List<KeyValue> expected = new ArrayList<KeyValue>();
  List<KeyValue> result = new ArrayList<KeyValue>();

  long id = System.currentTimeMillis();
  Get get = new Get(row);

  private HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private final String DIR = TEST_UTIL.getDataTestDir("TestStore").toString();

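  /**
   * Setup: build the qualifier set, the expected KeyValues and the Get that
   * the retrieval tests below share.
   */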
  @Override
  public void setUp() throws IOException {
    qualifiers.add(qf1);
    qualifiers.add(qf3);
    qualifiers.add(qf5);

    Iterator<byte[]> iter = qualifiers.iterator();
    while (iter.hasNext()) {
      byte [] next = iter.next();
      expected.add(new KeyValue(row, family, next, 1, (byte[])null));
      get.addColumn(family, next);
    }
  }

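  // Init helpers: each test gets a fresh HRegion/HStore rooted under
  // DIR/<methodName>.  The shorter overloads build up default descriptors
  // before delegating to the full init below.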
  private void init(String methodName) throws IOException {
    init(methodName, HBaseConfiguration.create());
  }

  private void init(String methodName, Configuration conf)
      throws IOException {
    HColumnDescriptor hcd = new HColumnDescriptor(family);
    hcd.setMaxVersions(4);
    init(methodName, conf, hcd);
  }

  private void init(String methodName, Configuration conf,
      HColumnDescriptor hcd) throws IOException {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
    init(methodName, conf, htd, hcd);
  }

  private void init(String methodName, Configuration conf, HTableDescriptor htd,
      HColumnDescriptor hcd) throws IOException {
    Path basedir = new Path(DIR + methodName);
    Path tableDir = FSUtils.getTableDir(basedir, htd.getTableName());
    String logName = "logs";
    Path logdir = new Path(basedir, logName);

    FileSystem fs = FileSystem.get(conf);
    fs.delete(logdir, true);

    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
    HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
    HRegion region = new HRegion(tableDir, hlog, fs, conf, info, htd, null);

    store = new HStore(region, hcd, conf);
  }

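  /**
   * Verify that compression and data block encoding settings from the column
   * descriptor are respected by createWriterInTmp().
   */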
  public void testCreateWriter() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);

    HColumnDescriptor hcd = new HColumnDescriptor(family);
    hcd.setCompressionType(Compression.Algorithm.GZ);
    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);
    init(getName(), conf, hcd);

    // Write a few cells through a writer created in the tmp directory
    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false, true);
    Path path = writer.getPath();
    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
    writer.close();

    // The written HFile must carry the compression and encoding we configured
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
    assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
    assertEquals(hcd.getDataBlockEncoding(), reader.getEncodingOnDisk());
    reader.close();
  }

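  /**
   * Verify that store files whose cells are all past the column family TTL
   * are selected for compaction and compacted away once
   * hbase.store.delete.expired.storefile is enabled.
   */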
  public void testDeleteExpiredStoreFiles() throws Exception {
    int storeFileNum = 4;
    int ttl = 4;
    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
    EnvironmentEdgeManagerTestHelper.injectEdge(edge);

    Configuration conf = HBaseConfiguration.create();
    // Enable expired store file deletion
    conf.setBoolean("hbase.store.delete.expired.storefile", true);
    HColumnDescriptor hcd = new HColumnDescriptor(family);
    hcd.setTimeToLive(ttl);
    init(getName(), conf, hcd);

    long sleepTime = this.store.getScanInfo().getTtl() / storeFileNum;
    long timeStamp;
    // Create storeFileNum store files, spaced (ttl / storeFileNum) apart
    for (int i = 1; i <= storeFileNum; i++) {
      LOG.info("Adding some data for the store file #" + i);
      timeStamp = EnvironmentEdgeManager.currentTimeMillis();
      this.store.add(new KeyValue(row, family, qf1, timeStamp, (byte[]) null));
      this.store.add(new KeyValue(row, family, qf2, timeStamp, (byte[]) null));
      this.store.add(new KeyValue(row, family, qf3, timeStamp, (byte[]) null));
      flush(i);
      edge.incrementTime(sleepTime);
    }

    // Verify the total number of store files
    assertEquals(storeFileNum, this.store.getStorefiles().size());

    // Each iteration another file expires; the compaction request should only
    // select expired files and compacting them should yield an empty file.
    for (int i = 1; i <= storeFileNum; i++) {
      CompactionContext compaction = this.store.requestCompaction();
      CompactionRequest cr = compaction.getRequest();

      // All the selected files should be expired
      List<StoreFile> files = new ArrayList<StoreFile>(cr.getFiles());
      assertEquals(Math.min(i, 2), cr.getFiles().size());
      for (int j = 0; j < files.size(); j++) {
        assertTrue(files.get(j).getReader().getMaxTimestamp() < (edge
            .currentTimeMillis() - this.store.getScanInfo().getTtl()));
      }

      // The compaction output should contain no entries since everything expired
      StoreFile compactedFile = this.store.compact(compaction).get(0);
      assertEquals(0, compactedFile.getReader().getEntries());

      // Let the next store file become expired
      edge.incrementTime(sleepTime);
    }
  }

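  /**
   * Verify that StoreUtils.getLowestTimestamp() agrees with the modification
   * times reported by the file system, both before and after a compaction.
   */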
  public void testLowestModificationTime() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);

    init(getName(), conf);

    int storeFileNum = 4;
    for (int i = 1; i <= storeFileNum; i++) {
      LOG.info("Adding some data for the store file #" + i);
      this.store.add(new KeyValue(row, family, qf1, i, (byte[])null));
      this.store.add(new KeyValue(row, family, qf2, i, (byte[])null));
      this.store.add(new KeyValue(row, family, qf3, i, (byte[])null));
      flush(i);
    }
    // After flush: check the lowest timestamp
    long lowestTimeStampFromManager = StoreUtils.getLowestTimestamp(store.getStorefiles());
    long lowestTimeStampFromFS = getLowestTimeStampFromFS(fs, store.getStorefiles());
    assertEquals(lowestTimeStampFromManager, lowestTimeStampFromFS);

    // After compaction: check the lowest timestamp again
    store.compact(store.requestCompaction());
    lowestTimeStampFromManager = StoreUtils.getLowestTimestamp(store.getStorefiles());
    lowestTimeStampFromFS = getLowestTimeStampFromFS(fs, store.getStorefiles());
    assertEquals(lowestTimeStampFromManager, lowestTimeStampFromFS);
  }

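  // Returns the oldest modification time among the given store files,
  // as reported by the file system.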
  private static long getLowestTimeStampFromFS(FileSystem fs,
      final Collection<StoreFile> candidates) throws IOException {
    long minTs = Long.MAX_VALUE;
    if (candidates.isEmpty()) {
      return minTs;
    }
    Path[] p = new Path[candidates.size()];
    int i = 0;
    for (StoreFile sf : candidates) {
      p[i] = sf.getPath();
      ++i;
    }

    FileStatus[] stats = fs.listStatus(p);
    if (stats == null || stats.length == 0) {
      return minTs;
    }
    for (FileStatus s : stats) {
      minTs = Math.min(minTs, s.getModificationTime());
    }
    return minTs;
  }

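  /**
   * Verify that an empty store file (metadata only, no entries) is handled
   * correctly: it is picked up when the store is reopened, and a Get still
   * returns the data written before it was created.
   */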
  public void testEmptyStoreFile() throws IOException {
    init(this.getName());
    // Write a store file
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    flush(1);
    // Now put in place an empty store file: write it manually, with a
    // hacked-in sequence id one above the existing file's
    StoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c),
        fs, StoreFile.DEFAULT_BLOCKSIZE_SMALL)
            .withOutputDir(storedir)
            .build();
    w.appendMetadata(seqid + 1, false);
    w.close();
    this.store.close();
    // Reopen the store: it should pick up both files
    this.store = new HStore(this.store.getHRegion(), this.store.getFamily(), c);
    assertEquals(2, this.store.getStorefilesCount());

    // The Get should still find the data written before the empty file was added
    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(),
        qualifiers);
    assertEquals(1, result.size());
  }

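  /**
   * Getting data from memstore only.
   */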
  public void testGet_FromMemStoreOnly() throws IOException {
    init(this.getName());

    // Put data in memstore
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null));

    // Get
    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(), qualifiers);

    // Compare
    assertCheck();
  }

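  /**
   * Getting data from store files only.
   */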
  public void testGet_FromFilesOnly() throws IOException {
    init(this.getName());

    // Put data in memstore
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    // flush
    flush(1);

    // Add more data
    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null));
    // flush
    flush(2);

    // Add more data
    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null));
    // flush
    flush(3);

    // Get
    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(),
        qualifiers);

    // Need to sort the result since multiple files
    Collections.sort(result, KeyValue.COMPARATOR);

    // Compare
    assertCheck();
  }

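  /**
   * Getting data from memstore and store files.
   */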
  public void testGet_FromMemStoreAndFiles() throws IOException {
    init(this.getName());

    // Put data in memstore
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    // flush
    flush(1);

    // Add more data
    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null));
    // flush
    flush(2);

    // Add more data, but do not flush: it stays in the memstore
    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null));

    // Get
    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(), qualifiers);

    // Need to sort the result since multiple files
    Collections.sort(result, KeyValue.COMPARATOR);

    // Compare
    assertCheck();
  }

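  // Snapshot and flush the memstore, then verify the expected store file
  // count and that the memstore is empty.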
  private void flush(int storeFilesSize) throws IOException {
    this.store.snapshot();
    flushStore(store, id++);
    assertEquals(storeFilesSize, this.store.getStorefiles().size());
    assertEquals(0, this.store.memstore.kvset.size());
  }

  private void assertCheck() {
    assertEquals(expected.size(), result.size());
    for (int i = 0; i < expected.size(); i++) {
      assertEquals(expected.get(i), result.get(i));
    }
  }

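  /**
   * Verify that an updateColumnValue() issued while the memstore snapshot is
   * outstanding goes to the live memstore, so that after the flush both the
   * flushed (old) value and the new value of the column are visible.
   */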
  public void testIncrementColumnValue_ICVDuringFlush()
      throws IOException, InterruptedException {
    init(this.getName());

    long oldValue = 1L;
    long newValue = 3L;
    this.store.add(new KeyValue(row, family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(oldValue)));

    // snapshot the store
    this.store.snapshot();

    // add other things
    this.store.add(new KeyValue(row, family, qf2,
        System.currentTimeMillis(),
        Bytes.toBytes(oldValue)));

    // update during the snapshot
    long ret = this.store.updateColumnValue(row, family, qf1, newValue);

    // memstore should have grown by some amount
    assertTrue(ret > 0);

    // then flush
    flushStore(store, id++);
    assertEquals(1, this.store.getStorefiles().size());
    // the kv added after the snapshot plus the updated qf1 value
    assertEquals(2, this.store.memstore.kvset.size());

    // how many key/values for this row are there?
    Get get = new Get(row);
    get.addColumn(family, qf1);
    get.setMaxVersions(); // all versions
    List<KeyValue> results = new ArrayList<KeyValue>();

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    assertEquals(2, results.size());

    long ts1 = results.get(0).getTimestamp();
    long ts2 = results.get(1).getTimestamp();

    assertTrue(ts1 > ts2);

    assertEquals(newValue, Bytes.toLong(results.get(0).getValue()));
    assertEquals(oldValue, Bytes.toLong(results.get(1).getValue()));
  }

  @Override
  protected void tearDown() throws Exception {
    super.tearDown();
    EnvironmentEdgeManagerTestHelper.reset();
  }

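  /**
   * Verify that repeated updateColumnValue() calls never report a negative
   * memstore size change, and that the accumulated size matches the heap size
   * computed from the final memstore contents.
   */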
  public void testICV_negMemstoreSize() throws IOException {
    init(this.getName());

    long time = 100;
    ManualEnvironmentEdge ee = new ManualEnvironmentEdge();
    ee.setValue(time);
    EnvironmentEdgeManagerTestHelper.injectEdge(ee);
    long newValue = 3L;
    long size = 0;

    // Put some data in the memstore first
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091000"), family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091200"), family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091300"), family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091400"), family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091500"), family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(newValue)));

    // Update the same two rows over and over; the reported size change must
    // never be negative
    for (int i = 0; i < 10000; ++i) {
      newValue++;

      long ret = this.store.updateColumnValue(row, family, qf1, newValue);
      long ret2 = this.store.updateColumnValue(row2, family, qf1, newValue);

      if (ret != 0) System.out.println("ret: " + ret);
      if (ret2 != 0) System.out.println("ret2: " + ret2);

      assertTrue("ret: " + ret, ret >= 0);
      size += ret;
      assertTrue("ret2: " + ret2, ret2 >= 0);
      size += ret2;

      // Advance the injected clock periodically
      if (i % 1000 == 0) {
        ee.setValue(++time);
      }
    }

    // The accumulated size must match the heap size of what is in the memstore
    long computedSize = 0;
    for (KeyValue kv : this.store.memstore.kvset) {
      long kvsize = this.store.memstore.heapSizeChange(kv, true);
      computedSize += kvsize;
    }
    assertEquals(computedSize, size);
  }

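  /**
   * Verify updateColumnValue() across a snapshot/flush combination with a
   * manually controlled clock: the latest increment must always be returned
   * as the newest version of the column.
   */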
  public void testIncrementColumnValue_SnapshotFlushCombo() throws Exception {
    ManualEnvironmentEdge mee = new ManualEnvironmentEdge();
    EnvironmentEdgeManagerTestHelper.injectEdge(mee);
    init(this.getName());

    long oldValue = 1L;
    long newValue = 3L;
    this.store.add(new KeyValue(row, family, qf1,
        EnvironmentEdgeManager.currentTimeMillis(),
        Bytes.toBytes(oldValue)));

    // snapshot the store
    this.store.snapshot();

    // update during the snapshot
    long ret = this.store.updateColumnValue(row, family, qf1, newValue);

    // memstore should have grown by some amount
    assertTrue(ret > 0);

    // then flush
    flushStore(store, id++);
    assertEquals(1, this.store.getStorefiles().size());
    // only the updated qf1 value remains in the memstore
    assertEquals(1, this.store.memstore.kvset.size());

    // increment again, without advancing the clock
    newValue += 1;
    this.store.updateColumnValue(row, family, qf1, newValue);

    // and once more
    newValue += 1;
    this.store.updateColumnValue(row, family, qf1, newValue);

    // how many key/values for this row are there?
    Get get = new Get(row);
    get.addColumn(family, qf1);
    get.setMaxVersions(); // all versions
    List<KeyValue> results = new ArrayList<KeyValue>();

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    assertEquals(2, results.size());

    long ts1 = results.get(0).getTimestamp();
    long ts2 = results.get(1).getTimestamp();

    assertTrue(ts1 > ts2);
    assertEquals(newValue, Bytes.toLong(results.get(0).getValue()));
    assertEquals(oldValue, Bytes.toLong(results.get(1).getValue()));

    // advance the clock a little and increment once more
    mee.setValue(2);
    newValue += 1;
    this.store.updateColumnValue(row, family, qf1, newValue);

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    assertEquals(2, results.size());

    ts1 = results.get(0).getTimestamp();
    ts2 = results.get(1).getTimestamp();

    assertTrue(ts1 > ts2);
    assertEquals(newValue, Bytes.toLong(results.get(0).getValue()));
    assertEquals(oldValue, Bytes.toLong(results.get(1).getValue()));
  }

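  /**
   * Verify that a flush against a file system that throws on write bubbles
   * the IOException up and leaves no store files behind.
   */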
  public void testHandleErrorsInFlush() throws Exception {
    LOG.info("Setting up a faulty file system that cannot write");

    final Configuration conf = HBaseConfiguration.create();
    User user = User.createUserForTesting(conf,
        "testhandleerrorsinflush", new String[]{"foo"});
    // Inject our faulty LocalFileSystem
    conf.setClass("fs.file.impl", FaultyFileSystem.class,
        FileSystem.class);
    user.runAs(new PrivilegedExceptionAction<Object>() {
      public Object run() throws Exception {
        // Make sure the injection worked
        FileSystem fs = FileSystem.get(conf);
        assertEquals(FaultyFileSystem.class, fs.getClass());

        // Initialize region
        init(getName(), conf);

        LOG.info("Adding some data");
        store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
        store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
        store.add(new KeyValue(row, family, qf3, 1, (byte[])null));

        LOG.info("Before flush, we should have no files");

        Collection<StoreFileInfo> files =
          store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
        assertEquals(0, files != null ? files.size() : 0);

        // flush
        try {
          LOG.info("Flushing");
          flush(1);
          fail("Didn't bubble up IOE!");
        } catch (IOException ioe) {
          assertTrue(ioe.getMessage().contains("Fault injected"));
        }

        LOG.info("After failed flush, we should still have no files!");
        files = store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
        assertEquals(0, files != null ? files.size() : 0);
        return null;
      }
    });
  }

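  /**
   * File system that wraps LocalFileSystem and hands out streams that throw
   * once the write position passes a fault position.  Used by
   * testHandleErrorsInFlush() above.
   */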
  static class FaultyFileSystem extends FilterFileSystem {
    List<SoftReference<FaultyOutputStream>> outStreams =
      new ArrayList<SoftReference<FaultyOutputStream>>();
    private long faultPos = 200;

    public FaultyFileSystem() {
      super(new LocalFileSystem());
      System.err.println("Creating faulty!");
    }

    @Override
    public FSDataOutputStream create(Path p) throws IOException {
      return new FaultyOutputStream(super.create(p), faultPos);
    }

    @Override
    public FSDataOutputStream create(Path f, FsPermission permission,
        boolean overwrite, int bufferSize, short replication, long blockSize,
        Progressable progress) throws IOException {
      return new FaultyOutputStream(super.create(f, permission,
          overwrite, bufferSize, replication, blockSize, progress), faultPos);
    }

    public FSDataOutputStream createNonRecursive(Path f, boolean overwrite,
        int bufferSize, short replication, long blockSize, Progressable progress)
        throws IOException {
      // Fake it.  Call create instead so the faulty stream is still returned.
      return create(f, overwrite, bufferSize, replication, blockSize, progress);
    }
  }

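  /**
   * Output stream that throws an IOException("Fault injected") once the
   * write position reaches the configured fault position.
   */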
  static class FaultyOutputStream extends FSDataOutputStream {
    volatile long faultPos = Long.MAX_VALUE;

    public FaultyOutputStream(FSDataOutputStream out,
        long faultPos) throws IOException {
      super(out, null);
      this.faultPos = faultPos;
    }

    @Override
    public void write(byte[] buf, int offset, int length) throws IOException {
      System.err.println("faulty stream write at pos " + getPos());
      injectFault();
      super.write(buf, offset, length);
    }

    private void injectFault() throws IOException {
      if (getPos() >= faultPos) {
        throw new IOException("Fault injected");
      }
    }
  }

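  // Run the full flush cycle (prepare, flushCache, commit) against the store.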
  private static void flushStore(HStore store, long id) throws IOException {
    StoreFlushContext storeFlushCtx = store.createFlushContext(id);
    storeFlushCtx.prepare();
    storeFlushCtx.flushCache(Mockito.mock(MonitoredTask.class));
    storeFlushCtx.commit(Mockito.mock(MonitoredTask.class));
  }

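  /**
   * Generates a list of KeyValues: one per (row, timestamp) combination for
   * the given qualifier and family.
   */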
  List<KeyValue> getKeyValueSet(long[] timestamps, int numRows,
      byte[] qualifier, byte[] family) {
    List<KeyValue> kvList = new ArrayList<KeyValue>();
    for (int i = 1; i <= numRows; i++) {
      byte[] b = Bytes.toBytes(i);
      for (long timestamp : timestamps) {
        kvList.add(new KeyValue(b, family, qualifier, timestamp, b));
      }
    }
    return kvList;
  }

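  /**
   * Test to ensure correctness when using Stores with multiple timestamps:
   * Gets with different time ranges must find data in both the flushed store
   * file and the memstore.
   */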
  public void testMultipleTimestamps() throws IOException {
    int numRows = 1;
    long[] timestamps1 = new long[] {1, 5, 10, 20};
    long[] timestamps2 = new long[] {30, 80};

    init(this.getName());

    List<KeyValue> kvList1 = getKeyValueSet(timestamps1, numRows, qf1, family);
    for (KeyValue kv : kvList1) {
      this.store.add(kv);
    }

    this.store.snapshot();
    flushStore(store, id++);

    List<KeyValue> kvList2 = getKeyValueSet(timestamps2, numRows, qf1, family);
    for (KeyValue kv : kvList2) {
      this.store.add(kv);
    }

    List<KeyValue> result;
    Get get = new Get(Bytes.toBytes(1));
    get.addColumn(family, qf1);

    // Time ranges overlapping the store file and/or the memstore should return data
    get.setTimeRange(0, 15);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(40, 90);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(10, 45);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(80, 145);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(1, 2);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    // A time range beyond all of the data should return nothing
    get.setTimeRange(90, 200);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() == 0);
  }

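  /**
   * Test split on a store with an empty column family (no store files): the
   * split point must be null both before and after a forced split request.
   */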
  public void testSplitWithEmptyColFam() throws IOException {
    init(this.getName());
    assertNull(store.getSplitPoint());
    store.getHRegion().forceSplit(null);
    assertNull(store.getSplitPoint());
    store.getHRegion().clearSplit_TESTS_ONLY();
  }

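  /**
   * Verify that the compaction throttle point is picked up from the
   * Configuration, the HTableDescriptor and the HColumnDescriptor, in that
   * order of increasing precedence.
   */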
  public void testStoreUsesConfigurationFromHcdAndHtd() throws Exception {
    final String CONFIG_KEY = "hbase.regionserver.thread.compaction.throttle";
    long anyValue = 10;

    // Set the throttle point via the XML configuration
    Configuration conf = HBaseConfiguration.create();
    conf.setLong(CONFIG_KEY, anyValue);
    init(getName() + "-xml", conf);
    assertTrue(store.throttleCompaction(anyValue + 1));
    assertFalse(store.throttleCompaction(anyValue));

    // HTD overrides XML
    --anyValue;
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
    HColumnDescriptor hcd = new HColumnDescriptor(family);
    htd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
    init(getName() + "-htd", conf, htd, hcd);
    assertTrue(store.throttleCompaction(anyValue + 1));
    assertFalse(store.throttleCompaction(anyValue));

    // HCD overrides HTD
    --anyValue;
    hcd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
    init(getName() + "-hcd", conf, htd, hcd);
    assertTrue(store.throttleCompaction(anyValue + 1));
    assertFalse(store.throttleCompaction(anyValue));
  }

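  // Store engine that records the compactor it creates, so the override test
  // below can verify it was actually used.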
  public static class DummyStoreEngine extends DefaultStoreEngine {
    public static DefaultCompactor lastCreatedCompactor = null;

    @Override
    protected void createComponents(
        Configuration conf, Store store, KVComparator comparator) throws IOException {
      super.createComponents(conf, store, comparator);
      lastCreatedCompactor = this.compactor;
    }
  }

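  /**
   * Verify that overriding StoreEngine.STORE_ENGINE_CLASS_KEY makes the store
   * build its components through the configured engine.
   */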
  public void testStoreUsesSearchEngineOverride() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, DummyStoreEngine.class.getName());
    init(this.getName(), conf);
    assertEquals(DummyStoreEngine.lastCreatedCompactor, this.store.storeEngine.getCompactor());
  }
}