/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */

package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.lang.ref.SoftReference;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListSet;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.apache.hadoop.util.Progressable;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

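/**
 * Test class for the HStore.
 */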
@Category(MediumTests.class)
public class TestStore extends TestCase {
  public static final Log LOG = LogFactory.getLog(TestStore.class);

  HStore store;
  byte [] table = Bytes.toBytes("table");
  byte [] family = Bytes.toBytes("family");

  byte [] row = Bytes.toBytes("row");
  byte [] row2 = Bytes.toBytes("row2");
  byte [] qf1 = Bytes.toBytes("qf1");
  byte [] qf2 = Bytes.toBytes("qf2");
  byte [] qf3 = Bytes.toBytes("qf3");
  byte [] qf4 = Bytes.toBytes("qf4");
  byte [] qf5 = Bytes.toBytes("qf5");
  byte [] qf6 = Bytes.toBytes("qf6");

  NavigableSet<byte[]> qualifiers =
      new ConcurrentSkipListSet<byte[]>(Bytes.BYTES_COMPARATOR);

  List<Cell> expected = new ArrayList<Cell>();
  List<Cell> result = new ArrayList<Cell>();

  long id = System.currentTimeMillis();
  Get get = new Get(row);

  private HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private final String DIR = TEST_UTIL.getDataTestDir("TestStore").toString();

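  /**
   * Setup: add qf1, qf3 and qf5 to the qualifier set and build the matching
   * expected cells and the Get for the test row.
   */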
  @Override
  public void setUp() throws IOException {
    qualifiers.add(qf1);
    qualifiers.add(qf3);
    qualifiers.add(qf5);

    Iterator<byte[]> iter = qualifiers.iterator();
    while (iter.hasNext()) {
      byte [] next = iter.next();
      expected.add(new KeyValue(row, family, next, 1, (byte[])null));
      get.addColumn(family, next);
    }
  }

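  /**
   * Initializes a fresh HStore (and backing HRegion) for the given test method
   * under DIR/methodName.
   */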
  private void init(String methodName) throws IOException {
    init(methodName, HBaseConfiguration.create());
  }

  private void init(String methodName, Configuration conf)
      throws IOException {
    HColumnDescriptor hcd = new HColumnDescriptor(family);
    hcd.setMaxVersions(4);
    init(methodName, conf, hcd);
  }

  private void init(String methodName, Configuration conf,
      HColumnDescriptor hcd) throws IOException {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
    init(methodName, conf, htd, hcd);
  }

  @SuppressWarnings("deprecation")
  private void init(String methodName, Configuration conf, HTableDescriptor htd,
      HColumnDescriptor hcd) throws IOException {
    Path basedir = new Path(DIR + methodName);
    Path tableDir = FSUtils.getTableDir(basedir, htd.getTableName());
    String logName = "logs";
    Path logdir = new Path(basedir, logName);

    FileSystem fs = FileSystem.get(conf);
    fs.delete(logdir, true);

    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
    HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
    HRegion region = new HRegion(tableDir, hlog, fs, conf, info, htd, null);

    store = new HStore(region, hcd, conf);
  }

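  /**
   * Verify that the compression and data block encoding configured on the
   * column family are carried over to the writer created in the store's tmp dir.
   */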
  public void testCreateWriter() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);

    HColumnDescriptor hcd = new HColumnDescriptor(family);
    hcd.setCompressionType(Compression.Algorithm.GZ);
    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);
    init(getName(), conf, hcd);

    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false, true);
    Path path = writer.getPath();
    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
    writer.close();

    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
    assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
    assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding());
    reader.close();
  }

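  /**
   * Verify expired store file deletion: each compaction request should only
   * pick up files whose max timestamp is older than the TTL, and compacting
   * them should produce an empty file.
   */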
  public void testDeleteExpiredStoreFiles() throws Exception {
    int storeFileNum = 4;
    int ttl = 4;
    IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
    EnvironmentEdgeManagerTestHelper.injectEdge(edge);

    Configuration conf = HBaseConfiguration.create();
    conf.setBoolean("hbase.store.delete.expired.storefile", true);
    HColumnDescriptor hcd = new HColumnDescriptor(family);
    hcd.setTimeToLive(ttl);
    init(getName(), conf, hcd);

    long sleepTime = this.store.getScanInfo().getTtl() / storeFileNum;
    long timeStamp;

    for (int i = 1; i <= storeFileNum; i++) {
      LOG.info("Adding some data for the store file #" + i);
      timeStamp = EnvironmentEdgeManager.currentTimeMillis();
      this.store.add(new KeyValue(row, family, qf1, timeStamp, (byte[]) null));
      this.store.add(new KeyValue(row, family, qf2, timeStamp, (byte[]) null));
      this.store.add(new KeyValue(row, family, qf3, timeStamp, (byte[]) null));
      flush(i);
      edge.incrementTime(sleepTime);
    }

    assertEquals(storeFileNum, this.store.getStorefiles().size());

    for (int i = 1; i <= storeFileNum; i++) {
      CompactionContext compaction = this.store.requestCompaction();
      CompactionRequest cr = compaction.getRequest();

      List<StoreFile> files = new ArrayList<StoreFile>(cr.getFiles());
      assertEquals(Math.min(i, 2), cr.getFiles().size());
      for (int j = 0; j < files.size(); j++) {
        assertTrue(files.get(j).getReader().getMaxTimestamp() < (edge
            .currentTimeMillis() - this.store.getScanInfo().getTtl()));
      }

      StoreFile compactedFile = this.store.compact(compaction).get(0);
      assertEquals(0, compactedFile.getReader().getEntries());

      edge.incrementTime(sleepTime);
    }
  }

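  /**
   * Verify that the lowest timestamp reported for the store files matches the
   * lowest modification time seen in the filesystem, before and after a
   * compaction.
   */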
  public void testLowestModificationTime() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);

    init(getName(), conf);

    int storeFileNum = 4;
    for (int i = 1; i <= storeFileNum; i++) {
      LOG.info("Adding some data for the store file #" + i);
      this.store.add(new KeyValue(row, family, qf1, i, (byte[])null));
      this.store.add(new KeyValue(row, family, qf2, i, (byte[])null));
      this.store.add(new KeyValue(row, family, qf3, i, (byte[])null));
      flush(i);
    }

    long lowestTimeStampFromManager = StoreUtils.getLowestTimestamp(store.getStorefiles());
    long lowestTimeStampFromFS = getLowestTimeStampFromFS(fs, store.getStorefiles());
    assertEquals(lowestTimeStampFromManager, lowestTimeStampFromFS);

    store.compact(store.requestCompaction());
    lowestTimeStampFromManager = StoreUtils.getLowestTimestamp(store.getStorefiles());
    lowestTimeStampFromFS = getLowestTimeStampFromFS(fs, store.getStorefiles());
    assertEquals(lowestTimeStampFromManager, lowestTimeStampFromFS);
  }

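  /** Returns the smallest modification time among the given store files. */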
  private static long getLowestTimeStampFromFS(FileSystem fs,
      final Collection<StoreFile> candidates) throws IOException {
    long minTs = Long.MAX_VALUE;
    if (candidates.isEmpty()) {
      return minTs;
    }
    Path[] p = new Path[candidates.size()];
    int i = 0;
    for (StoreFile sf : candidates) {
      p[i] = sf.getPath();
      ++i;
    }

    FileStatus[] stats = fs.listStatus(p);
    if (stats == null || stats.length == 0) {
      return minTs;
    }
    for (FileStatus s : stats) {
      minTs = Math.min(minTs, s.getModificationTime());
    }
    return minTs;
  }

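  /**
   * Write an empty store file next to a real one, reopen the store, and verify
   * that both files are picked up and reads still work.
   */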
  public void testEmptyStoreFile() throws IOException {
    init(this.getName());

    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    flush(1);

    StoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c),
        fs, StoreFile.DEFAULT_BLOCKSIZE_SMALL)
            .withOutputDir(storedir)
            .build();
    w.appendMetadata(seqid + 1, false);
    w.close();
    this.store.close();

    this.store = new HStore(this.store.getHRegion(), this.store.getFamily(), c);
    assertEquals(2, this.store.getStorefilesCount());

    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(),
        qualifiers);
    assertEquals(1, result.size());
  }

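  /**
   * Getting data from memstore only.
   */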
  public void testGet_FromMemStoreOnly() throws IOException {
    init(this.getName());

    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null));

    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(), qualifiers);

    assertCheck();
  }

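  /**
   * Getting data from store files only.
   */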
  public void testGet_FromFilesOnly() throws IOException {
    init(this.getName());

    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    flush(1);

    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null));
    flush(2);

    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null));
    flush(3);

    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(),
        qualifiers);

    Collections.sort(result, KeyValue.COMPARATOR);

    assertCheck();
  }

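  /**
   * Getting data from memstore and store files.
   */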
  public void testGet_FromMemStoreAndFiles() throws IOException {
    init(this.getName());

    this.store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
    flush(1);

    this.store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf4, 1, (byte[])null));
    flush(2);

    this.store.add(new KeyValue(row, family, qf5, 1, (byte[])null));
    this.store.add(new KeyValue(row, family, qf6, 1, (byte[])null));

    result = HBaseTestingUtility.getFromStoreFile(store,
        get.getRow(), qualifiers);

    Collections.sort(result, KeyValue.COMPARATOR);

    assertCheck();
  }

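  /**
   * Snapshots and flushes the memstore, then verifies the expected number of
   * store files and an empty memstore.
   */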
  private void flush(int storeFilesSize) throws IOException {
    this.store.snapshot();
    flushStore(store, id++);
    assertEquals(storeFilesSize, this.store.getStorefiles().size());
    assertEquals(0, this.store.memstore.kvset.size());
  }

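  /** Asserts that the result list matches the expected cells, in order. */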
  private void assertCheck() {
    assertEquals(expected.size(), result.size());
    for (int i = 0; i < expected.size(); i++) {
      assertEquals(expected.get(i), result.get(i));
    }
  }

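  /**
   * Increment-column-value test: an update applied after the memstore has been
   * snapshotted (but before the flush completes) should land in the active
   * memstore, and both the old and new values should remain readable.
   */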
  public void testIncrementColumnValue_ICVDuringFlush()
      throws IOException, InterruptedException {
    init(this.getName());

    long oldValue = 1L;
    long newValue = 3L;
    this.store.add(new KeyValue(row, family, qf1,
        System.currentTimeMillis(),
        Bytes.toBytes(oldValue)));

    this.store.snapshot();

    this.store.add(new KeyValue(row, family, qf2,
        System.currentTimeMillis(),
        Bytes.toBytes(oldValue)));

    long ret = this.store.updateColumnValue(row, family, qf1, newValue);
    assertTrue(ret > 0);

    flushStore(store, id++);
    assertEquals(1, this.store.getStorefiles().size());

    assertEquals(2, this.store.memstore.kvset.size());

    Get get = new Get(row);
    get.addColumn(family, qf1);
    get.setMaxVersions();
    List<Cell> results = new ArrayList<Cell>();

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    assertEquals(2, results.size());

    long ts1 = results.get(0).getTimestamp();
    long ts2 = results.get(1).getTimestamp();

    assertTrue(ts1 > ts2);

    assertEquals(newValue, Bytes.toLong(CellUtil.cloneValue(results.get(0))));
    assertEquals(oldValue, Bytes.toLong(CellUtil.cloneValue(results.get(1))));
  }

  @Override
  protected void tearDown() throws Exception {
    super.tearDown();
    EnvironmentEdgeManagerTestHelper.reset();
  }

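  /**
   * Verify that repeated in-place increments never report a negative change in
   * memstore size, and that the accumulated size matches the heap size
   * computed directly from the memstore contents.
   */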
  public void testICV_negMemstoreSize() throws IOException {
    init(this.getName());

    long time = 100;
    ManualEnvironmentEdge ee = new ManualEnvironmentEdge();
    ee.setValue(time);
    EnvironmentEdgeManagerTestHelper.injectEdge(ee);
    long newValue = 3L;
    long size = 0;

    size += this.store.add(new KeyValue(Bytes.toBytes("200909091000"), family, qf1,
        System.currentTimeMillis(), Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091200"), family, qf1,
        System.currentTimeMillis(), Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091300"), family, qf1,
        System.currentTimeMillis(), Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091400"), family, qf1,
        System.currentTimeMillis(), Bytes.toBytes(newValue)));
    size += this.store.add(new KeyValue(Bytes.toBytes("200909091500"), family, qf1,
        System.currentTimeMillis(), Bytes.toBytes(newValue)));

    for (int i = 0; i < 10000; ++i) {
      newValue++;

      long ret = this.store.updateColumnValue(row, family, qf1, newValue);
      long ret2 = this.store.updateColumnValue(row2, family, qf1, newValue);

      if (ret != 0) System.out.println("ret: " + ret);
      if (ret2 != 0) System.out.println("ret2: " + ret2);

      assertTrue("ret: " + ret, ret >= 0);
      size += ret;
      assertTrue("ret2: " + ret2, ret2 >= 0);
      size += ret2;

      if (i % 1000 == 0)
        ee.setValue(++time);
    }

    long computedSize = 0;
    for (KeyValue kv : this.store.memstore.kvset) {
      long kvsize = MemStore.heapSizeChange(kv, true);
      computedSize += kvsize;
    }
    assertEquals(computedSize, size);
  }

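  /**
   * Drive increments, a snapshot and a flush with a manual clock, and verify
   * that the most recent increment is always returned as the newest version,
   * both before and after the clock advances.
   */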
  public void testIncrementColumnValue_SnapshotFlushCombo() throws Exception {
    ManualEnvironmentEdge mee = new ManualEnvironmentEdge();
    EnvironmentEdgeManagerTestHelper.injectEdge(mee);
    init(this.getName());

    long oldValue = 1L;
    long newValue = 3L;
    this.store.add(new KeyValue(row, family, qf1,
        EnvironmentEdgeManager.currentTimeMillis(),
        Bytes.toBytes(oldValue)));

    this.store.snapshot();

    long ret = this.store.updateColumnValue(row, family, qf1, newValue);
    assertTrue(ret > 0);

    flushStore(store, id++);
    assertEquals(1, this.store.getStorefiles().size());
    assertEquals(1, this.store.memstore.kvset.size());

    newValue += 1;
    this.store.updateColumnValue(row, family, qf1, newValue);

    newValue += 1;
    this.store.updateColumnValue(row, family, qf1, newValue);

    Get get = new Get(row);
    get.addColumn(family, qf1);
    get.setMaxVersions();
    List<Cell> results = new ArrayList<Cell>();

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    assertEquals(2, results.size());

    long ts1 = results.get(0).getTimestamp();
    long ts2 = results.get(1).getTimestamp();

    assertTrue(ts1 > ts2);
    assertEquals(newValue, Bytes.toLong(CellUtil.cloneValue(results.get(0))));
    assertEquals(oldValue, Bytes.toLong(CellUtil.cloneValue(results.get(1))));

    mee.setValue(2);
    newValue += 1;
    this.store.updateColumnValue(row, family, qf1, newValue);

    results = HBaseTestingUtility.getFromStoreFile(store, get);
    assertEquals(2, results.size());

    ts1 = results.get(0).getTimestamp();
    ts2 = results.get(1).getTimestamp();

    assertTrue(ts1 > ts2);
    assertEquals(newValue, Bytes.toLong(CellUtil.cloneValue(results.get(0))));
    assertEquals(oldValue, Bytes.toLong(CellUtil.cloneValue(results.get(1))));
  }

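  /**
   * Flush against a faulty file system and verify that the failure bubbles up
   * as an IOException and leaves no store files behind.
   */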
  public void testHandleErrorsInFlush() throws Exception {
    LOG.info("Setting up a faulty file system that cannot write");

    final Configuration conf = HBaseConfiguration.create();
    User user = User.createUserForTesting(conf,
        "testhandleerrorsinflush", new String[]{"foo"});
    conf.setClass("fs.file.impl", FaultyFileSystem.class,
        FileSystem.class);
    user.runAs(new PrivilegedExceptionAction<Object>() {
      public Object run() throws Exception {
        FileSystem fs = FileSystem.get(conf);
        assertEquals(FaultyFileSystem.class, fs.getClass());

        init(getName(), conf);

        LOG.info("Adding some data");
        store.add(new KeyValue(row, family, qf1, 1, (byte[])null));
        store.add(new KeyValue(row, family, qf2, 1, (byte[])null));
        store.add(new KeyValue(row, family, qf3, 1, (byte[])null));

        LOG.info("Before flush, we should have no files");
        Collection<StoreFileInfo> files =
            store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
        assertEquals(0, files != null ? files.size() : 0);

        try {
          LOG.info("Flushing");
          flush(1);
          fail("Didn't bubble up IOE!");
        } catch (IOException ioe) {
          assertTrue(ioe.getMessage().contains("Fault injected"));
        }

        LOG.info("After failed flush, we should still have no files!");
        files = store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
        assertEquals(0, files != null ? files.size() : 0);
        return null;
      }
    });
  }

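  /**
   * FilterFileSystem wrapping LocalFileSystem that hands out output streams
   * which throw an IOException once writes pass a configured fault position.
   */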
  static class FaultyFileSystem extends FilterFileSystem {
    List<SoftReference<FaultyOutputStream>> outStreams =
        new ArrayList<SoftReference<FaultyOutputStream>>();
    private long faultPos = 200;

    public FaultyFileSystem() {
      super(new LocalFileSystem());
      System.err.println("Creating faulty!");
    }

    @Override
    public FSDataOutputStream create(Path p) throws IOException {
      return new FaultyOutputStream(super.create(p), faultPos);
    }

    @Override
    public FSDataOutputStream create(Path f, FsPermission permission,
        boolean overwrite, int bufferSize, short replication, long blockSize,
        Progressable progress) throws IOException {
      return new FaultyOutputStream(super.create(f, permission,
          overwrite, bufferSize, replication, blockSize, progress), faultPos);
    }

    public FSDataOutputStream createNonRecursive(Path f, boolean overwrite,
        int bufferSize, short replication, long blockSize, Progressable progress)
        throws IOException {
      return create(f, overwrite, bufferSize, replication, blockSize, progress);
    }
  }

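  /**
   * Output stream that throws an IOException once writes reach the given
   * fault position.
   */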
  static class FaultyOutputStream extends FSDataOutputStream {
    volatile long faultPos = Long.MAX_VALUE;

    public FaultyOutputStream(FSDataOutputStream out,
        long faultPos) throws IOException {
      super(out, null);
      this.faultPos = faultPos;
    }

    @Override
    public void write(byte[] buf, int offset, int length) throws IOException {
      System.err.println("faulty stream write at pos " + getPos());
      injectFault();
      super.write(buf, offset, length);
    }

    private void injectFault() throws IOException {
      if (getPos() >= faultPos) {
        throw new IOException("Fault injected");
      }
    }
  }

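  /** Runs the full flush lifecycle (prepare, flush, commit) on the given store. */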
  private static void flushStore(HStore store, long id) throws IOException {
    StoreFlushContext storeFlushCtx = store.createFlushContext(id);
    storeFlushCtx.prepare();
    storeFlushCtx.flushCache(Mockito.mock(MonitoredTask.class));
    storeFlushCtx.commit(Mockito.mock(MonitoredTask.class));
  }

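  /**
   * Generates KeyValues for numRows rows, one cell per given timestamp, using
   * the row key bytes as the value.
   */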
  List<Cell> getKeyValueSet(long[] timestamps, int numRows,
      byte[] qualifier, byte[] family) {
    List<Cell> kvList = new ArrayList<Cell>();
    for (int i = 1; i <= numRows; i++) {
      byte[] b = Bytes.toBytes(i);
      for (long timestamp : timestamps) {
        kvList.add(new KeyValue(b, family, qualifier, timestamp, b));
      }
    }
    return kvList;
  }

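  /**
   * Test correctness when a store holds multiple timestamps: reads should only
   * see data when the requested time range overlaps the written timestamps.
   */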
  public void testMultipleTimestamps() throws IOException {
    int numRows = 1;
    long[] timestamps1 = new long[] {1, 5, 10, 20};
    long[] timestamps2 = new long[] {30, 80};

    init(this.getName());

    List<Cell> kvList1 = getKeyValueSet(timestamps1, numRows, qf1, family);
    for (Cell kv : kvList1) {
      this.store.add(KeyValueUtil.ensureKeyValue(kv));
    }

    this.store.snapshot();
    flushStore(store, id++);

    List<Cell> kvList2 = getKeyValueSet(timestamps2, numRows, qf1, family);
    for (Cell kv : kvList2) {
      this.store.add(KeyValueUtil.ensureKeyValue(kv));
    }

    List<Cell> result;
    Get get = new Get(Bytes.toBytes(1));
    get.addColumn(family, qf1);

    get.setTimeRange(0, 15);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(40, 90);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(10, 45);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(80, 145);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(1, 2);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() > 0);

    get.setTimeRange(90, 200);
    result = HBaseTestingUtility.getFromStoreFile(store, get);
    assertTrue(result.size() == 0);
  }

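  /**
   * Test split on an empty column family (no store files): getSplitPoint
   * should return null rather than throw.
   */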
  public void testSplitWithEmptyColFam() throws IOException {
    init(this.getName());
    assertNull(store.getSplitPoint());
    store.getHRegion().forceSplit(null);
    assertNull(store.getSplitPoint());
    store.getHRegion().clearSplit_TESTS_ONLY();
  }

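  /**
   * Verify that the store picks up the compaction throttle setting from the
   * Configuration, the table descriptor and the column family descriptor, with
   * the most specific level winning.
   */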
  public void testStoreUsesConfigurationFromHcdAndHtd() throws Exception {
    final String CONFIG_KEY = "hbase.regionserver.thread.compaction.throttle";
    long anyValue = 10;

    Configuration conf = HBaseConfiguration.create();
    conf.setLong(CONFIG_KEY, anyValue);
    init(getName() + "-xml", conf);
    assertTrue(store.throttleCompaction(anyValue + 1));
    assertFalse(store.throttleCompaction(anyValue));

    --anyValue;
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
    HColumnDescriptor hcd = new HColumnDescriptor(family);
    htd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
    init(getName() + "-htd", conf, htd, hcd);
    assertTrue(store.throttleCompaction(anyValue + 1));
    assertFalse(store.throttleCompaction(anyValue));

    --anyValue;
    hcd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
    init(getName() + "-hcd", conf, htd, hcd);
    assertTrue(store.throttleCompaction(anyValue + 1));
    assertFalse(store.throttleCompaction(anyValue));
  }

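  /**
   * DefaultStoreEngine subclass that records the compactor it creates so the
   * test can check which engine the store actually instantiated.
   */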
  public static class DummyStoreEngine extends DefaultStoreEngine {
    public static DefaultCompactor lastCreatedCompactor = null;

    @Override
    protected void createComponents(
        Configuration conf, Store store, KVComparator comparator) throws IOException {
      super.createComponents(conf, store, comparator);
      lastCreatedCompactor = this.compactor;
    }
  }

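  /**
   * Verify that the store engine class set via StoreEngine.STORE_ENGINE_CLASS_KEY
   * overrides the default engine.
   */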
  public void testStoreUsesSearchEngineOverride() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, DummyStoreEngine.class.getName());
    init(this.getName(), conf);
    assertEquals(DummyStoreEngine.lastCreatedCompactor, this.store.storeEngine.getCompactor());
  }
}