/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.ByteBufferInputStream;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.io.IOUtils;

import com.google.common.base.Preconditions;
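
/**
 * Reads and writes version 2 HFile blocks. Each block has the following on-disk
 * layout, as parsed and written by the code below:
 * <ul>
 * <li>8-byte block type magic (see {@link BlockType})</li>
 * <li>4-byte int: on-disk size of the block, excluding the header</li>
 * <li>4-byte int: uncompressed size of the block, excluding the header</li>
 * <li>8-byte long: offset of the previous block of the same type, used for
 * seeking to the previous block without going through the block index</li>
 * <li>If HBase-level checksums are in use: a 1-byte checksum type, a 4-byte int
 * giving the number of data bytes covered by each checksum value, and a 4-byte
 * int giving the on-disk size of the header plus data, excluding the checksum
 * bytes that follow the data</li>
 * </ul>
 * The (possibly compressed and encrypted) data follows the header, and the
 * checksum bytes, if any, follow the data.
 */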
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {

  /**
   * Number of reads to perform using HDFS-level checksums after an HBase-level
   * checksum failure, before switching back to HBase checksum verification.
   */
  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;

  /** Values for the {@code fillHeader} constructor parameter. */
  public static final boolean FILL_HEADER = true;
  public static final boolean DONT_FILL_HEADER = false;

  /**
   * Size of an {@link BlockType#ENCODED_DATA} block header: the regular block
   * header followed by the short identifying the data block encoding.
   */
  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE
      + DataBlockEncoding.ID_SIZE;

  static final byte[] DUMMY_HEADER_NO_CHECKSUM =
      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];

  public static final int BYTE_BUFFER_HEAP_SIZE = (int) ClassSize.estimateBase(
      ByteBuffer.wrap(new byte[0], 0, 0).getClass(), false);

  /**
   * Space reserved at the end of a serialized block for extra metadata: a
   * one-byte checksum-support flag, the block's offset (long) and the next
   * block's on-disk size (int). See {@link #serializeExtraInfo(ByteBuffer)}.
   */
  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT
      + Bytes.SIZEOF_LONG;

  /** Each checksum value is an integer that can be stored in 4 bytes. */
  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;

  private static final CacheableDeserializer<Cacheable> blockDeserializer =
      new CacheableDeserializer<Cacheable>() {
        @Override
        public HFileBlock deserialize(ByteBuffer buf, boolean reuse) throws IOException {
          // The buffer carries EXTRA_SERIALIZATION_SPACE bytes of trailer metadata
          // after the block itself; strip them before slicing or copying the block.
          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();
          ByteBuffer newByteBuffer;
          if (reuse) {
            newByteBuffer = buf.slice();
          } else {
            newByteBuffer = ByteBuffer.allocate(buf.limit());
            newByteBuffer.put(buf);
          }
          // Read the trailer metadata: checksum flag, offset, next block's on-disk size.
          buf.position(buf.limit());
          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);
          boolean usesChecksum = buf.get() == (byte) 1;
          HFileBlock ourBuffer = new HFileBlock(newByteBuffer, usesChecksum);
          ourBuffer.offset = buf.getLong();
          ourBuffer.nextBlockOnDiskSizeWithHeader = buf.getInt();
          if (ourBuffer.hasNextBlockHeader()) {
            ourBuffer.buf.limit(ourBuffer.buf.limit() - ourBuffer.headerSize());
          }
          return ourBuffer;
        }

        @Override
        public int getDeserialiserIdentifier() {
          return deserializerIdentifier;
        }

        @Override
        public HFileBlock deserialize(ByteBuffer b) throws IOException {
          return deserialize(b, false);
        }
      };

  private static final int deserializerIdentifier;
  static {
    deserializerIdentifier = CacheableDeserializerIdManager
        .registerDeserializer(blockDeserializer);
  }

  /** Type of block. Header field 0. */
  private BlockType blockType;

  /** Size on disk excluding header, including checksums. Header field 1. */
  private int onDiskSizeWithoutHeader;

  /** Size of pure data, excluding header and checksums. Header field 2. */
  private final int uncompressedSizeWithoutHeader;

  /** Offset of the previous block of the same type. Header field 3. */
  private final long prevBlockOffset;

  /**
   * Size on disk of header plus data, excluding checksums. Header field 6,
   * only present when HBase-level checksums are in use.
   */
  private final int onDiskDataSizeWithHeader;

  /** The in-memory representation of the hfile block. */
  private ByteBuffer buf;

  /** Meta data that holds meta information on the hfile block. */
  private HFileContext fileContext;

  /**
   * The offset of this block in the file. Populated by the reader for blocks
   * read from a file; not part of the block's on-disk header.
   */
  private long offset = -1;

  /**
   * The on-disk size of the next block, including its header, obtained by
   * peeking into the first {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of
   * the next block's header, or -1 if unknown.
   */
  private int nextBlockOnDiskSizeWithHeader = -1;
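
  /**
   * Creates a new {@link HFile} block from the given fields. This constructor
   * is used when the block data has already been read and uncompressed, e.g.
   * by the writer when producing a block for the block cache.
   *
   * @param blockType the type of this block, see {@link BlockType}
   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}
   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}
   * @param prevBlockOffset see {@link #prevBlockOffset}
   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes)
   *          followed by uncompressed data
   * @param fillHeader when true, overwrite the first header bytes of
   *          {@code buf} with the given field values
   * @param offset the file offset the block was read from
   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}
   * @param fileContext HFile meta data
   */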
  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,
      int onDiskDataSizeWithHeader, HFileContext fileContext) {
    this.blockType = blockType;
    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;
    this.prevBlockOffset = prevBlockOffset;
    this.buf = buf;
    this.offset = offset;
    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;
    this.fileContext = fileContext;
    if (fillHeader) {
      overwriteHeader();
    }
    this.buf.rewind();
  }
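
  /**
   * Copy constructor. Creates a shallow copy of {@code that}'s buffer via
   * {@link ByteBuffer#duplicate()}, so the two blocks share the underlying bytes.
   */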
  HFileBlock(HFileBlock that) {
    this.blockType = that.blockType;
    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;
    this.prevBlockOffset = that.prevBlockOffset;
    this.buf = that.buf.duplicate();
    this.offset = that.offset;
    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;
    this.fileContext = that.fileContext;
    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;
  }
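
  /**
   * Creates a block from an existing buffer starting with a header. Rewinds
   * and takes ownership of the buffer.
   *
   * @param b the buffer to parse, positioned at a block header
   * @param usesHBaseChecksum whether the header includes the HBase checksum fields
   */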
  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {
    b.rewind();
    blockType = BlockType.read(b);
    onDiskSizeWithoutHeader = b.getInt();
    uncompressedSizeWithoutHeader = b.getInt();
    prevBlockOffset = b.getLong();
    HFileContextBuilder contextBuilder = new HFileContextBuilder();
    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);
    if (usesHBaseChecksum) {
      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));
      contextBuilder.withBytesPerCheckSum(b.getInt());
      this.onDiskDataSizeWithHeader = b.getInt();
    } else {
      contextBuilder.withChecksumType(ChecksumType.NULL);
      contextBuilder.withBytesPerCheckSum(0);
      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +
          HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
    }
    this.fileContext = contextBuilder.build();
    buf = b;
    buf.rewind();
  }

  public BlockType getBlockType() {
    return blockType;
  }

  /** @return the short value representing the data block encoding id of this block */
  public short getDataBlockEncodingId() {
    if (blockType != BlockType.ENCODED_DATA) {
      throw new IllegalArgumentException("Querying encoder ID of a block " +
          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);
    }
    return buf.getShort(headerSize());
  }

  /** @return the on-disk size of the block with header size included, in bytes */
  public int getOnDiskSizeWithHeader() {
    return onDiskSizeWithoutHeader + headerSize();
  }

  /** @return the on-disk size of the data plus checksums, header excluded, in bytes */
  public int getOnDiskSizeWithoutHeader() {
    return onDiskSizeWithoutHeader;
  }

  /** @return the uncompressed size of data, header and checksums excluded, in bytes */
  public int getUncompressedSizeWithoutHeader() {
    return uncompressedSizeWithoutHeader;
  }

  /**
   * @return the offset of the previous block of the same type in the file, or
   *         -1 if unknown
   */
  public long getPrevBlockOffset() {
    return prevBlockOffset;
  }

  /**
   * Rewinds {@code buf} and writes the first four header fields (plus the
   * checksum fields when HBase checksums are in use), overwriting whatever
   * header bytes the buffer currently holds.
   */
  private void overwriteHeader() {
    buf.rewind();
    blockType.write(buf);
    buf.putInt(onDiskSizeWithoutHeader);
    buf.putInt(uncompressedSizeWithoutHeader);
    buf.putLong(prevBlockOffset);
    if (this.fileContext.isUseHBaseChecksum()) {
      buf.put(fileContext.getChecksumType().getCode());
      buf.putInt(fileContext.getBytesPerChecksum());
      buf.putInt(onDiskDataSizeWithHeader);
    }
  }

  /**
   * Returns a buffer that does not include the header or checksum.
   *
   * @return the buffer with header skipped and checksum omitted
   */
  public ByteBuffer getBufferWithoutHeader() {
    ByteBuffer dup = this.buf.duplicate();
    dup.position(headerSize());
    dup.limit(buf.limit() - totalChecksumBytes());
    return dup.slice();
  }

  /**
   * Returns the buffer this block stores internally. Clients must not modify
   * the buffer object. Checksum data is not included in the returned buffer,
   * but header data is.
   *
   * @return the buffer of this block for read-only operations
   */
  public ByteBuffer getBufferReadOnly() {
    ByteBuffer dup = this.buf.duplicate();
    dup.limit(buf.limit() - totalChecksumBytes());
    return dup.slice();
  }

  /**
   * Returns the buffer of this block, including header and checksum data.
   * Clients must not modify the buffer object.
   *
   * @return the buffer with header and checksum included, for read-only operations
   */
  public ByteBuffer getBufferReadOnlyWithHeader() {
    ByteBuffer dup = this.buf.duplicate();
    return dup.slice();
  }

  /**
   * Returns a byte buffer of this block, including header data and checksum,
   * positioned at the beginning of the header. The underlying data array is
   * not copied.
   *
   * @return the byte buffer with header and checksum included
   */
  ByteBuffer getBufferWithHeader() {
    ByteBuffer dupBuf = buf.duplicate();
    dupBuf.rewind();
    return dupBuf;
  }

  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,
      String fieldName) throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf
          + ") is different from that in the field (" + valueFromField + ")");
    }
  }

  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)
      throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new IOException("Block type stored in the buffer: " +
          valueFromBuf + ", block type field: " + valueFromField);
    }
  }

  /**
   * Checks if the block is internally consistent, i.e. the first
   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a
   * valid header consistent with the fields. Intended for testing and
   * debugging; not thread-safe, because it alters the internal buffer pointer.
   */
  void sanityCheck() throws IOException {
    buf.rewind();

    sanityCheckAssertion(BlockType.read(buf), blockType);

    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,
        "onDiskSizeWithoutHeader");

    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,
        "uncompressedSizeWithoutHeader");

    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlockOffset");
    if (this.fileContext.isUseHBaseChecksum()) {
      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(),
          "checksumType");
      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),
          "bytesPerChecksum");
      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");
    }

    int cksumBytes = totalChecksumBytes();
    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;
    if (buf.limit() != expectedBufLimit) {
      throw new AssertionError("Expected buffer limit " + expectedBufLimit
          + ", got " + buf.limit());
    }

    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read
    // the next block's header, so there are two sensible values for capacity.
    int hdrSize = headerSize();
    if (buf.capacity() != expectedBufLimit &&
        buf.capacity() != expectedBufLimit + hdrSize) {
      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +
          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));
    }
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder()
        .append("HFileBlock [")
        .append(" fileOffset=").append(offset)
        .append(" headerSize()=").append(headerSize())
        .append(" blockType=").append(blockType)
        .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)
        .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)
        .append(" prevBlockOffset=").append(prevBlockOffset)
        .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());
    if (fileContext.isUseHBaseChecksum()) {
      // The checksum type byte sits right after the first four header fields
      // (8 + 4 + 4 + 8 = 24 bytes), followed by the 4-byte bytesPerChecksum.
      sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))
          .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))
          .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);
    } else {
      sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)
          .append("(").append(onDiskSizeWithoutHeader)
          .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");
    }
    String dataBegin = null;
    if (buf.hasArray()) {
      dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),
          Math.min(32, buf.limit() - buf.arrayOffset() - headerSize()));
    } else {
      ByteBuffer bufWithoutHeader = getBufferWithoutHeader();
      byte[] dataBeginBytes = new byte[Math.min(32,
          bufWithoutHeader.limit() - bufWithoutHeader.position())];
      bufWithoutHeader.get(dataBeginBytes);
      dataBegin = Bytes.toStringBinary(dataBeginBytes);
    }
    sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())
        .append(" totalChecksumBytes()=").append(totalChecksumBytes())
        .append(" isUnpacked()=").append(isUnpacked())
        .append(" buf=[ ").append(buf).append(" ]")
        .append(" dataBeginsWith=").append(dataBegin)
        .append(" fileContext=").append(fileContext)
        .append(" ]");
    return sb.toString();
  }

  /**
   * Called after reading a block with an on-disk size provided by the caller.
   * Validates the provided size against the size recorded in the block's own
   * header.
   */
  private void validateOnDiskSizeWithoutHeader(int expectedOnDiskSizeWithoutHeader)
      throws IOException {
    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {
      String dataBegin = null;
      if (buf.hasArray()) {
        dataBegin = Bytes.toStringBinary(buf.array(), buf.arrayOffset(),
            Math.min(32, buf.limit()));
      } else {
        ByteBuffer bufDup = getBufferReadOnly();
        byte[] dataBeginBytes = new byte[Math.min(32, bufDup.limit() - bufDup.position())];
        bufDup.get(dataBeginBytes);
        dataBegin = Bytes.toStringBinary(dataBeginBytes);
      }
      String blockInfoMsg =
          "Block offset: " + offset + ", data starts with: " + dataBegin;
      throw new IOException("On-disk size without header provided is "
          + expectedOnDiskSizeWithoutHeader + ", but block "
          + "header contains " + onDiskSizeWithoutHeader + ". " +
          blockInfoMsg);
    }
  }
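
  /**
   * Retrieves the decompressed/decrypted view of this block. An encoded block
   * remains in its encoded structure. Internal structures are shared between
   * instances where applicable.
   */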
  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {
    if (!fileContext.isCompressedOrEncrypted()) {
      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),
      // which is used for block serialization to L2 cache, does not preserve encoding and
      // encryption details.
      return this;
    }

    HFileBlock unpacked = new HFileBlock(this);
    unpacked.allocateBuffer(); // allocates space for the decompressed block

    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?
        reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();

    ByteBuffer dup = this.buf.duplicate();
    dup.position(this.headerSize());
    dup = dup.slice();
    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),
        unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),
        dup);

    // Preserve the next block's header bytes in the new block if we have them.
    if (unpacked.hasNextBlockHeader()) {
      // Both buffers are limited before the next block's header; raise the
      // limits on duplicates so the header bytes can be copied across.
      ByteBuffer inDup = this.buf.duplicate();
      inDup.limit(inDup.limit() + headerSize());
      ByteBuffer outDup = unpacked.buf.duplicate();
      outDup.limit(outDup.limit() + unpacked.headerSize());
      ByteBufferUtils.copyFromBufferToBuffer(
          outDup,
          inDup,
          this.onDiskDataSizeWithHeader,
          unpacked.headerSize() + unpacked.uncompressedSizeWithoutHeader
              + unpacked.totalChecksumBytes(), unpacked.headerSize());
    }
    return unpacked;
  }

  /**
   * Return true when this buffer includes the next block's header.
   */
  private boolean hasNextBlockHeader() {
    return nextBlockOnDiskSizeWithHeader > 0;
  }
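
  /**
   * Always allocates a new buffer of the correct size for an unpacked block.
   * Copies the header bytes from the existing buffer; the data is filled in
   * afterwards by the caller (see {@link #unpack(HFileContext, FSReader)}).
   */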
  private void allocateBuffer() {
    int cksumBytes = totalChecksumBytes();
    int headerSize = headerSize();
    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader +
        cksumBytes + (hasNextBlockHeader() ? headerSize : 0);

    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);

    // Copy header bytes into newBuf. newBuf is heap-backed, so calling
    // array() is safe here.
    ByteBuffer dup = buf.duplicate();
    dup.position(0);
    dup.get(newBuf.array(), newBuf.arrayOffset(), headerSize);

    buf = newBuf;
    // Set limit to exclude the next block's header.
    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);
  }

  /**
   * Return true when this hfile block is unpacked, i.e. its buffer is sized to
   * hold the full uncompressed block, with header and checksums, and nothing more.
   */
  public boolean isUnpacked() {
    final int cksumBytes = totalChecksumBytes();
    final int headerSize = headerSize();
    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;
    final int bufCapacity = buf.capacity();
    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;
  }

  /** An additional sanity check in case no compression or encryption is being used. */
  public void assumeUncompressed() throws IOException {
    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader +
        totalChecksumBytes()) {
      throw new IOException("Using no compression but "
          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "
          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader
          + ", numChecksumbytes=" + totalChecksumBytes());
    }
  }

  /**
   * @param expectedType the expected type of this block
   * @throws IOException if this block's type is different than expected
   */
  public void expectType(BlockType expectedType) throws IOException {
    if (blockType != expectedType) {
      throw new IOException("Invalid block type: expected=" + expectedType
          + ", actual=" + blockType);
    }
  }

  /** @return the offset of this block in the file it was read from */
  public long getOffset() {
    if (offset < 0) {
      throw new IllegalStateException(
          "HFile block offset not initialized properly");
    }
    return offset;
  }

  /** @return a byte stream reading the data plus checksum of this block */
  public DataInputStream getByteStream() {
    ByteBuffer dup = this.buf.duplicate();
    dup.position(this.headerSize());
    return new DataInputStream(new ByteBufferInputStream(dup));
  }

  @Override
  public long heapSize() {
    long size = ClassSize.align(
        ClassSize.OBJECT +
        // Block type, byte buffer and meta references
        3 * ClassSize.REFERENCE +
        // On-disk size, uncompressed size, next block's on-disk size
        // and on-disk data size with header
        4 * Bytes.SIZEOF_INT +
        // This and previous block offset
        2 * Bytes.SIZEOF_LONG +
        // Heap size of the meta object. meta will always be non-null.
        fileContext.heapSize()
    );

    if (buf != null) {
      // Deep overhead of the byte buffer. Needs to be aligned separately.
      size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
    }

    return ClassSize.align(size);
  }
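
  /**
   * Read from an input stream at least <code>necessaryLen</code> bytes and, if
   * possible, <code>extraLen</code> more. Analogous to
   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a
   * number of "extra" bytes to also optionally read.
   *
   * @param in the input stream to read from
   * @param buf the buffer to read into
   * @param bufOffset the destination offset in the buffer
   * @param necessaryLen the number of bytes that are absolutely necessary to read
   * @param extraLen the number of extra bytes that would be nice to read
   * @return true if succeeded in reading the extra bytes as well
   * @throws IOException if failed to read the necessary bytes
   */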
  public static boolean readWithExtra(InputStream in, byte buf[],
      int bufOffset, int necessaryLen, int extraLen) throws IOException {
    int bytesRemaining = necessaryLen + extraLen;
    while (bytesRemaining > 0) {
      int ret = in.read(buf, bufOffset, bytesRemaining);
      if (ret == -1 && bytesRemaining <= extraLen) {
        // We could not read the "extra data", but that is OK.
        break;
      }

      if (ret < 0) {
        throw new IOException("Premature EOF from inputStream (read "
            + "returned " + ret + ", was trying to read " + necessaryLen
            + " necessary bytes and " + extraLen + " extra bytes, "
            + "successfully read "
            + (necessaryLen + extraLen - bytesRemaining) + ")");
      }
      bufOffset += ret;
      bytesRemaining -= ret;
    }
    return bytesRemaining <= 0;
  }

  /**
   * @return the on-disk size of the next block (including the header size),
   *         read by peeking into the next block's header, or -1 if unknown
   */
  public int getNextBlockOnDiskSizeWithHeader() {
    return nextBlockOnDiskSizeWithHeader;
  }
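
  /**
   * Unified version 2 {@link HFile} block writer. The intended usage pattern
   * is as follows:
   * <ol>
   * <li>Construct an {@link HFileBlock.Writer}, providing a data block encoder
   * and an {@link HFileContext};
   * <li>Call {@link Writer#startWriting} and get a data stream to write to;
   * <li>Write your data into the stream;
   * <li>Call {@link Writer#writeHeaderAndData(FSDataOutputStream)} to store the
   * serialized block into an external stream;
   * <li>Repeat to write more blocks.
   * </ol>
   */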
  public static class Writer {
    private enum State {
      INIT,
      WRITING,
      BLOCK_READY
    }

    /** Writer state. Used to check the workflow of block writing. */
    private State state = State.INIT;

    /** Data block encoder used for data blocks. */
    private final HFileDataBlockEncoder dataBlockEncoder;

    private HFileBlockEncodingContext dataBlockEncodingCtx;

    /** Block encoding context used for non-data blocks. */
    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;

    /**
     * The stream we use to accumulate data into a block in an uncompressed
     * format. We reset this stream at the end of each block and reuse it. The
     * header is written as the first {@link HConstants#HFILEBLOCK_HEADER_SIZE}
     * bytes into this stream.
     */
    private ByteArrayOutputStream baosInMemory;

    /**
     * Current block type. Set in {@link #startWriting(BlockType)}. Could be
     * changed in {@link #finishBlock()} from {@link BlockType#DATA} to
     * {@link BlockType#ENCODED_DATA}.
     */
    private BlockType blockType;

    /**
     * A stream that we write uncompressed bytes to, which compresses them and
     * writes them to {@link #baosInMemory}.
     */
    private DataOutputStream userDataStream;

    // Size of actual data being written. Does not consider block encoding or
    // compression; includes the header size.
    private int unencodedDataSizeWritten;

    /**
     * Bytes to be written to the file system, including the header. Compressed
     * if compression is turned on. Valid in the "block ready" state.
     */
    private byte[] onDiskBytesWithHeader;

    /**
     * Checksum bytes for the block, computed over {@link #onDiskBytesWithHeader}
     * and written to the stream right after it. Valid in the "block ready" state.
     */
    private byte[] onDiskChecksum;

    /**
     * The header followed by the uncompressed (but potentially encoded) bytes,
     * kept for cache-on-write. Valid in the "block ready" state.
     */
    private byte[] uncompressedBytesWithHeader;

    /**
     * Current block's start offset in the output stream, or -1 if the block
     * has not yet been written to a stream.
     */
    private long startOffset;

    /**
     * Offset of the previous block of each type, indexed by
     * {@link BlockType#getId()}; used to fill the previous-block-offset header
     * field. Entries are -1 until a block of that type has been written.
     */
    private long[] prevOffsetByType;

    /** The offset of the previous block of the same type. */
    private long prevOffset;

    private HFileContext fileContext;

    /**
     * @param dataBlockEncoder data block encoding algorithm to use, or null for none
     * @param fileContext HFile meta data
     */
    public Writer(HFileDataBlockEncoder dataBlockEncoder, HFileContext fileContext) {
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
      defaultBlockEncodingCtx = new HFileBlockDefaultEncodingContext(null,
          HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);
      dataBlockEncodingCtx = this.dataBlockEncoder
          .newDataBlockEncodingContext(HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);

      if (fileContext.getBytesPerChecksum() < HConstants.HFILEBLOCK_HEADER_SIZE) {
        throw new RuntimeException("Unsupported value of bytesPerChecksum. " +
            "Minimum is " + HConstants.HFILEBLOCK_HEADER_SIZE + " but the configured value is " +
            fileContext.getBytesPerChecksum());
      }

      baosInMemory = new ByteArrayOutputStream();

      prevOffsetByType = new long[BlockType.values().length];
      for (int i = 0; i < prevOffsetByType.length; ++i) {
        prevOffsetByType[i] = -1;
      }

      this.fileContext = fileContext;
    }

    /**
     * Starts writing into the block. The previous block's data is discarded.
     *
     * @return the stream the user can write their data into
     */
    public DataOutputStream startWriting(BlockType newBlockType)
        throws IOException {
      if (state == State.BLOCK_READY && startOffset != -1) {
        // We had a previous block that was written to a stream at a specific
        // offset. Save that offset as the last offset of a block of that type.
        prevOffsetByType[blockType.getId()] = startOffset;
      }

      startOffset = -1;
      blockType = newBlockType;

      baosInMemory.reset();
      baosInMemory.write(HConstants.HFILEBLOCK_DUMMY_HEADER);

      state = State.WRITING;

      // We will compress it later in finishBlock().
      userDataStream = new DataOutputStream(baosInMemory);
      if (newBlockType == BlockType.DATA) {
        this.dataBlockEncoder.startBlockEncoding(dataBlockEncodingCtx, userDataStream);
      }
      this.unencodedDataSizeWritten = 0;
      return userDataStream;
    }

    /**
     * Writes the Cell to this block.
     */
    public void write(Cell cell) throws IOException {
      expectState(State.WRITING);
      this.unencodedDataSizeWritten += this.dataBlockEncoder.encode(cell, dataBlockEncodingCtx,
          this.userDataStream);
    }

    /**
     * Returns the stream for the user to write to. The block writer takes care
     * of handling compression and buffering for caching on write. Can only be
     * called in the "writing" state.
     *
     * @return the data output stream for the user to write to
     */
    DataOutputStream getUserDataStream() {
      expectState(State.WRITING);
      return userDataStream;
    }

    /**
     * Transitions the block writer from the "writing" state to the "block
     * ready" state. Does nothing if a block is already finished.
     */
    void ensureBlockReady() throws IOException {
      Preconditions.checkState(state != State.INIT,
          "Unexpected state: " + state);

      if (state == State.BLOCK_READY) {
        return;
      }

      // This will set state to BLOCK_READY.
      finishBlock();
    }
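
    /**
     * An internal method that finishes block encoding, flushes the user data
     * stream, compresses/encrypts the block if needed, fills in both headers,
     * and computes checksums. Block writer state transitions must be managed
     * by the caller.
     */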
    private void finishBlock() throws IOException {
      if (blockType == BlockType.DATA) {
        BufferGrabbingByteArrayOutputStream baosInMemoryCopy =
            new BufferGrabbingByteArrayOutputStream();
        baosInMemory.writeTo(baosInMemoryCopy);
        this.dataBlockEncoder.endBlockEncoding(dataBlockEncodingCtx, userDataStream,
            baosInMemoryCopy.buf, blockType);
        blockType = dataBlockEncodingCtx.getBlockType();
      }
      userDataStream.flush();
      // This does an array copy, so it is safe to cache this byte array.
      uncompressedBytesWithHeader = baosInMemory.toByteArray();
      prevOffset = prevOffsetByType[blockType.getId()];

      // We need to set state before we can package the block up for
      // cache-on-write. In a way, the block is ready, but not yet encoded or
      // compressed.
      state = State.BLOCK_READY;
      if (blockType == BlockType.DATA || blockType == BlockType.ENCODED_DATA) {
        onDiskBytesWithHeader = dataBlockEncodingCtx
            .compressAndEncrypt(uncompressedBytesWithHeader);
      } else {
        onDiskBytesWithHeader = defaultBlockEncodingCtx
            .compressAndEncrypt(uncompressedBytesWithHeader);
      }
      int numBytes = (int) ChecksumUtil.numBytes(
          onDiskBytesWithHeader.length,
          fileContext.getBytesPerChecksum());

      // Put the header into the on-disk bytes.
      putHeader(onDiskBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);
      // Set the same header on the uncompressed bytes, for cache-on-write.
      putHeader(uncompressedBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);

      onDiskChecksum = new byte[numBytes];
      ChecksumUtil.generateChecksums(
          onDiskBytesWithHeader, 0, onDiskBytesWithHeader.length,
          onDiskChecksum, 0, fileContext.getChecksumType(), fileContext.getBytesPerChecksum());
    }

    public static class BufferGrabbingByteArrayOutputStream extends ByteArrayOutputStream {
      private byte[] buf;

      @Override
      public void write(byte[] b, int off, int len) {
        // Grab a reference to the caller's array instead of copying it. This
        // is only safe because ByteArrayOutputStream.writeTo() passes its
        // internal buffer in a single call.
        this.buf = b;
      }

      public byte[] getBuffer() {
        return this.buf;
      }
    }
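
    /**
     * Puts the header into the given byte array at the given offset.
     *
     * @param onDiskSize size of the block on disk: header + data + checksums
     * @param uncompressedSize size of the block after decompression (but
     *          before optional data block decoding), including header
     * @param onDiskDataSize size of the block on disk with header and data,
     *          but not including the checksums
     */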
    private void putHeader(byte[] dest, int offset, int onDiskSize,
        int uncompressedSize, int onDiskDataSize) {
      offset = blockType.put(dest, offset);
      offset = Bytes.putInt(dest, offset, onDiskSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putInt(dest, offset, uncompressedSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putLong(dest, offset, prevOffset);
      offset = Bytes.putByte(dest, offset, fileContext.getChecksumType().getCode());
      offset = Bytes.putInt(dest, offset, fileContext.getBytesPerChecksum());
      Bytes.putInt(dest, offset, onDiskDataSize);
    }

    /**
     * Writes the header and the compressed data of this block (or uncompressed
     * data when not using compression) into the given stream. Can be called in
     * the "writing" state or in the "block ready" state; if called in the
     * "writing" state, transitions the writer to the "block ready" state.
     * Records the offset of this block so it can be referenced by the next
     * block of the same type.
     *
     * @param out the output stream to write the block to
     */
    public void writeHeaderAndData(FSDataOutputStream out) throws IOException {
      long offset = out.getPos();
      if (startOffset != -1 && offset != startOffset) {
        throw new IOException("A " + blockType + " block written to a "
            + "stream twice, first at offset " + startOffset + ", then at "
            + offset);
      }
      startOffset = offset;

      finishBlockAndWriteHeaderAndData((DataOutputStream) out);
    }

    /**
     * Finishes the block if necessary and writes the header, data and checksum
     * bytes into the given stream. Unlike
     * {@link #writeHeaderAndData(FSDataOutputStream)}, does not record the
     * block's start offset.
     */
    protected void finishBlockAndWriteHeaderAndData(DataOutputStream out)
        throws IOException {
      ensureBlockReady();
      out.write(onDiskBytesWithHeader);
      out.write(onDiskChecksum);
    }

    /**
     * Returns the header, data and checksums exactly as they would be stored
     * on disk, in a single byte array. Can be called in the "writing" state or
     * in the "block ready" state; if called in the "writing" state,
     * transitions the writer to the "block ready" state.
     *
     * @return header, data and checksums as they would be stored on disk
     */
    byte[] getHeaderAndDataForTest() throws IOException {
      ensureBlockReady();
      // This does an extra copy, but the method is only used by unit tests.
      byte[] output =
          new byte[onDiskBytesWithHeader.length
              + onDiskChecksum.length];
      System.arraycopy(onDiskBytesWithHeader, 0, output, 0,
          onDiskBytesWithHeader.length);
      System.arraycopy(onDiskChecksum, 0, output,
          onDiskBytesWithHeader.length, onDiskChecksum.length);
      return output;
    }

    /**
     * Releases resources used by this writer.
     */
    public void release() {
      if (dataBlockEncodingCtx != null) {
        dataBlockEncodingCtx.close();
        dataBlockEncodingCtx = null;
      }
      if (defaultBlockEncodingCtx != null) {
        defaultBlockEncodingCtx.close();
        defaultBlockEncodingCtx = null;
      }
    }

    /**
     * Returns the on-disk size of the data portion of the block. This is the
     * compressed size if compression is enabled, and includes the checksum
     * bytes. Can only be called in the "block ready" state. The header is not
     * compressed, and its size is not included in the return value.
     *
     * @return the on-disk size of the block, not including the header
     */
    int getOnDiskSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length
          - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * Returns the on-disk size of the block. Can only be called in the
     * "block ready" state.
     *
     * @return the on-disk size of the block ready to be written, including the
     *         header size, the data, and the checksum data
     */
    int getOnDiskSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length;
    }

    /**
     * The uncompressed size of the block data. Does not include the header size.
     */
    int getUncompressedSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * The uncompressed size of the block data, including the header size.
     */
    int getUncompressedSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length;
    }

    /** @return true if a block is being written */
    public boolean isWriting() {
      return state == State.WRITING;
    }

    /**
     * Returns the number of bytes written into the current block so far, or
     * zero if not writing a block at the moment. Note that this will also
     * return zero in the "block ready" state.
     *
     * @return the number of bytes written
     */
    public int blockSizeWritten() {
      if (state != State.WRITING) {
        return 0;
      }
      return this.unencodedDataSizeWritten;
    }

    /**
     * Returns the header followed by the uncompressed data, even if using
     * compression. This is needed for storing uncompressed blocks in the block
     * cache. Can only be called in the "block ready" state. Returns only the
     * header and data; does not include checksum data.
     *
     * @return uncompressed block bytes for caching on write
     */
    ByteBuffer getUncompressedBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(uncompressedBytesWithHeader);
    }

    /**
     * Returns the header followed by the on-disk (compressed, encoded and/or
     * encrypted) data. This is needed for storing packed blocks in the block
     * cache. Returns only the header and data; does not include checksum data.
     *
     * @return packed block bytes for caching on write
     */
    ByteBuffer getOnDiskBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(onDiskBytesWithHeader);
    }

    private void expectState(State expectedState) {
      if (state != expectedState) {
        throw new IllegalStateException("Expected state: " + expectedState +
            ", actual state: " + state);
      }
    }

    /**
     * Takes the given {@link BlockWritable} instance, creates a new block of
     * its appropriate type, writes the writable into this block, and flushes
     * the block into the output stream.
     *
     * @param bw the block-writable object to write as a block
     * @param out the file system output stream
     */
    public void writeBlock(BlockWritable bw, FSDataOutputStream out)
        throws IOException {
      bw.writeToBlock(startWriting(bw.getBlockType()));
      writeHeaderAndData(out);
    }

    /**
     * Creates a new HFileBlock for caching. Checksums have already been
     * validated on read, so the cached block carries no checksum data; this is
     * indicated by a zero bytesPerChecksum and a NULL checksum type in the new
     * block's context.
     */
    public HFileBlock getBlockForCaching(CacheConfig cacheConf) {
      HFileContext newContext = new HFileContextBuilder()
          .withBlockSize(fileContext.getBlocksize())
          .withBytesPerCheckSum(0)
          .withChecksumType(ChecksumType.NULL) // no checksums in cached data
          .withCompression(fileContext.getCompression())
          .withDataBlockEncoding(fileContext.getDataBlockEncoding())
          .withHBaseCheckSum(fileContext.isUseHBaseChecksum())
          .withCompressTags(fileContext.isCompressTags())
          .withIncludesMvcc(fileContext.isIncludesMvcc())
          .withIncludesTags(fileContext.isIncludesTags())
          .build();
      return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(),
          getUncompressedSizeWithoutHeader(), prevOffset,
          cacheConf.shouldCacheCompressed(blockType.getCategory()) ?
              getOnDiskBufferWithHeader() :
              getUncompressedBufferWithHeader(),
          FILL_HEADER, startOffset,
          onDiskBytesWithHeader.length + onDiskChecksum.length, newContext);
    }
  }

  /** Something that can be written into a block. */
  public interface BlockWritable {

    /** The type of block this data should use. */
    BlockType getBlockType();

    /**
     * Writes the block to the provided stream. Must not write any magic
     * records.
     *
     * @param out a stream to write uncompressed data into
     */
    void writeToBlock(DataOutput out) throws IOException;
  }

  /** An iterator over {@link HFileBlock}s. */
  public interface BlockIterator {

    /**
     * Get the next block, or null if there are no more blocks to iterate.
     */
    HFileBlock nextBlock() throws IOException;

    /**
     * Similar to {@link #nextBlock()}, but checks the block type and throws an
     * exception if it is incorrect.
     */
    HFileBlock nextBlockWithBlockType(BlockType blockType) throws IOException;
  }

  /** A full-fledged reader with iteration ability. */
  public interface FSReader {

    /**
     * Reads the block at the given offset in the file with the given on-disk
     * size and uncompressed size.
     *
     * @param offset the offset in the file to read at
     * @param onDiskSize the on-disk size of the entire block, including all
     *          applicable headers, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the compressed part of
     *          the block, or -1 if unknown
     * @param pread whether to use a positional read
     * @return the newly read block
     */
    HFileBlock readBlockData(long offset, long onDiskSize,
        int uncompressedSize, boolean pread) throws IOException;

    /**
     * Creates a block iterator over the given portion of the {@link HFile}.
     * The iterator returns blocks starting with offset such that
     * startOffset &lt;= offset &lt; endOffset. Returned blocks are always unpacked.
     *
     * @param startOffset the offset of the block to start iteration with
     * @param endOffset the offset to end iteration at (exclusive)
     * @return an iterator of blocks between the two given offsets
     */
    BlockIterator blockRange(long startOffset, long endOffset);

    /** Closes the backing streams. */
    void closeStreams() throws IOException;

    /** Get a decoder for {@link BlockType#ENCODED_DATA} blocks from this file. */
    HFileBlockDecodingContext getBlockDecodingContext();

    /** Get the default decoder for blocks from this file. */
    HFileBlockDecodingContext getDefaultBlockDecodingContext();
  }

  /**
   * A common implementation of some methods of {@link FSReader} and some
   * tools for implementing HFile format version-specific block readers.
   */
  private abstract static class AbstractFSReader implements FSReader {
    /** The size of the file we are reading from, or -1 if unknown. */
    protected long fileSize;

    /** The size of the header */
    protected final int hdrSize;

    /** The filesystem used to access data */
    protected HFileSystem hfs;

    /** The path (if any) where this data is coming from */
    protected Path path;

    private final Lock streamLock = new ReentrantLock();

    /** The default buffer size for our buffered streams */
    public static final int DEFAULT_BUFFER_SIZE = 1 << 20;

    protected HFileContext fileContext;

    public AbstractFSReader(long fileSize, HFileSystem hfs, Path path, HFileContext fileContext)
        throws IOException {
      this.fileSize = fileSize;
      this.hfs = hfs;
      this.path = path;
      this.fileContext = fileContext;
      this.hdrSize = headerSize(fileContext.isUseHBaseChecksum());
    }

    @Override
    public BlockIterator blockRange(final long startOffset,
        final long endOffset) {
      final FSReader owner = this;
      return new BlockIterator() {
        private long offset = startOffset;

        @Override
        public HFileBlock nextBlock() throws IOException {
          if (offset >= endOffset) {
            return null;
          }
          HFileBlock b = readBlockData(offset, -1, -1, false);
          offset += b.getOnDiskSizeWithHeader();
          return b.unpack(fileContext, owner);
        }

        @Override
        public HFileBlock nextBlockWithBlockType(BlockType blockType)
            throws IOException {
          HFileBlock blk = nextBlock();
          if (blk.getBlockType() != blockType) {
            throw new IOException("Expected block of type " + blockType
                + " but found " + blk.getBlockType());
          }
          return blk;
        }
      };
    }
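
    /**
     * Does a positional read or a seek-and-read into the given buffer,
     * optionally peeking into the next block's header.
     *
     * @param istream the input source of data
     * @param dest destination buffer
     * @param destOffset offset in the destination buffer
     * @param size size of the block to be read
     * @param peekIntoNextBlock whether to read the next block's on-disk size
     * @param fileOffset position in the stream to read at
     * @param pread whether we should do a positional read
     * @return the on-disk size of the next block with header size included, or
     *         -1 if it could not be determined
     */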
    protected int readAtOffset(FSDataInputStream istream,
        byte[] dest, int destOffset, int size,
        boolean peekIntoNextBlock, long fileOffset, boolean pread)
        throws IOException {
      if (peekIntoNextBlock &&
          destOffset + size + hdrSize > dest.length) {
        // We are asked to read the next block's header as well, but there is
        // not enough room in the array.
        throw new IOException("Attempted to read " + size + " bytes and " +
            hdrSize + " bytes of next header into a " + dest.length +
            "-byte array at offset " + destOffset);
      }

      if (!pread && streamLock.tryLock()) {
        // Seek + read. Better for scanning.
        try {
          istream.seek(fileOffset);

          long realOffset = istream.getPos();
          if (realOffset != fileOffset) {
            throw new IOException("Tried to seek to " + fileOffset + " to "
                + "read " + size + " bytes, but pos=" + realOffset
                + " after seek");
          }

          if (!peekIntoNextBlock) {
            IOUtils.readFully(istream, dest, destOffset, size);
            return -1;
          }

          // Try to read the next block's header.
          if (!readWithExtra(istream, dest, destOffset, size, hdrSize)) {
            return -1;
          }
        } finally {
          streamLock.unlock();
        }
      } else {
        // Positional read. Better for random reads; also used when the
        // streamLock is already held by another thread.
        int extraSize = peekIntoNextBlock ? hdrSize : 0;
        int ret = istream.read(fileOffset, dest, destOffset, size + extraSize);
        if (ret < size) {
          throw new IOException("Positional read of " + size + " bytes " +
              "failed at offset " + fileOffset + " (returned " + ret + ")");
        }

        if (ret == size || ret < size + extraSize) {
          // Could not read the next block's header, or did not try.
          return -1;
        }
      }

      assert peekIntoNextBlock;
      return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) + hdrSize;
    }
  }
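
  /**
   * The header of the block that follows the one most recently read by a
   * thread, prefetched while peeking past the end of that block, so that the
   * next read can skip re-reading the header.
   */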
  private static class PrefetchedHeader {
    long offset = -1;
    byte[] header = new byte[HConstants.HFILEBLOCK_HEADER_SIZE];
    final ByteBuffer buf = ByteBuffer.wrap(header, 0, HConstants.HFILEBLOCK_HEADER_SIZE);
  }

  /** Reads version 2 blocks from the filesystem. */
  static class FSReaderImpl extends AbstractFSReader {
    /**
     * The file system stream of the underlying {@link HFile} that does or
     * doesn't do checksum validations in the filesystem.
     */
    protected FSDataInputStreamWrapper streamWrapper;

    private HFileBlockDecodingContext encodedBlockDecodingCtx;

    /** Default context used when block type is not {@link BlockType#ENCODED_DATA}. */
    private final HFileBlockDefaultDecodingContext defaultDecodingCtx;

    private ThreadLocal<PrefetchedHeader> prefetchedHeaderForThread =
        new ThreadLocal<PrefetchedHeader>() {
          @Override
          public PrefetchedHeader initialValue() {
            return new PrefetchedHeader();
          }
        };

    public FSReaderImpl(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
        HFileContext fileContext) throws IOException {
      super(fileSize, hfs, path, fileContext);
      this.streamWrapper = stream;
      // Older versions of HBase didn't support checksums.
      this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
      defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
      encodedBlockDecodingCtx = defaultDecodingCtx;
    }

    /**
     * A constructor that reads files with the latest minor version. Used by
     * unit tests only.
     */
    FSReaderImpl(FSDataInputStream istream, long fileSize, HFileContext fileContext)
        throws IOException {
      this(new FSDataInputStreamWrapper(istream), fileSize, null, null, fileContext);
    }

    /**
     * Reads a version 2 block. If HBase checksum verification fails, falls
     * back to HDFS checksums and retries the read once before giving up.
     *
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including
     *          the header, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the block; always -1
     *          for the version 2 reader
     * @param pread whether to use a positional read
     */
    @Override
    public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL,
        int uncompressedSize, boolean pread)
        throws IOException {
      // It is ok to get a reference to the stream here without any
      // locks because it is marked final.
      boolean doVerificationThruHBaseChecksum = streamWrapper.shouldUseHBaseChecksum();
      FSDataInputStream is = streamWrapper.getStream(doVerificationThruHBaseChecksum);

      HFileBlock blk = readBlockDataInternal(is, offset,
          onDiskSizeWithHeaderL,
          uncompressedSize, pread,
          doVerificationThruHBaseChecksum);
      if (blk == null) {
        HFile.LOG.warn("HBase checksum verification failed for file " +
            path + " at offset " +
            offset + " filesize " + fileSize +
            ". Retrying read with HDFS checksums turned on...");

        if (!doVerificationThruHBaseChecksum) {
          String msg = "HBase checksum verification failed for file " +
              path + " at offset " +
              offset + " filesize " + fileSize +
              " but this cannot happen because doVerify is " +
              doVerificationThruHBaseChecksum;
          HFile.LOG.warn(msg);
          throw new IOException(msg); // cannot happen case here
        }
        HFile.checksumFailures.incrementAndGet(); // update metrics

        // If we have a checksum failure, we fall back into a mode where the
        // next few reads use HDFS-level checksums. We aim to make the next
        // CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD reads avoid HBase checksum
        // verification, but since this value is set without holding any
        // locks, we may actually do a few more.
        is = this.streamWrapper.fallbackToFsChecksum(CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD);
        doVerificationThruHBaseChecksum = false;
        blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL,
            uncompressedSize, pread,
            doVerificationThruHBaseChecksum);
        if (blk != null) {
          HFile.LOG.warn("HDFS checksum verification succeeded for file " +
              path + " at offset " +
              offset + " filesize " + fileSize);
        }
      }
      if (blk == null && !doVerificationThruHBaseChecksum) {
        String msg = "readBlockData failed, possibly due to " +
            "checksum verification failed for file " + path +
            " at offset " + offset + " filesize " + fileSize;
        HFile.LOG.warn(msg);
        throw new IOException(msg);
      }

      // Tell the stream wrapper that this read (and its checksum
      // verification, if any) succeeded, so it can re-enable HBase checksums
      // if they were switched off by an earlier failure.
      streamWrapper.checksumOk();
      return blk;
    }
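
    /**
     * Reads a version 2 block, using the thread-local prefetched header bytes
     * when they are available for this offset.
     *
     * @param is the input stream to read from
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including
     *          the header, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the block; always -1
     *          in this reader
     * @param pread whether to use a positional read
     * @param verifyChecksum whether to verify HBase-level checksums; on a
     *          checksum mismatch this method returns null so the caller can retry
     */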
    private HFileBlock readBlockDataInternal(FSDataInputStream is, long offset,
        long onDiskSizeWithHeaderL, int uncompressedSize, boolean pread,
        boolean verifyChecksum)
        throws IOException {
      if (offset < 0) {
        throw new IOException("Invalid offset=" + offset + " trying to read "
            + "block (onDiskSize=" + onDiskSizeWithHeaderL
            + ", uncompressedSize=" + uncompressedSize + ")");
      }

      if (uncompressedSize != -1) {
        throw new IOException("Version 2 block reader API does not need " +
            "the uncompressed size parameter");
      }

      if ((onDiskSizeWithHeaderL < hdrSize && onDiskSizeWithHeaderL != -1)
          || onDiskSizeWithHeaderL >= Integer.MAX_VALUE) {
        throw new IOException("Invalid onDisksize=" + onDiskSizeWithHeaderL
            + ": expected to be at least " + hdrSize
            + " and at most " + Integer.MAX_VALUE + ", or -1 (offset="
            + offset + ", uncompressedSize=" + uncompressedSize + ")");
      }

      int onDiskSizeWithHeader = (int) onDiskSizeWithHeaderL;

      // See if we can avoid reading the header. This is desirable, because we
      // will not incur a backward seek operation if we have already read this
      // block's header as part of the previous read's look-ahead.
      PrefetchedHeader prefetchedHeader = prefetchedHeaderForThread.get();
      ByteBuffer headerBuf = prefetchedHeader.offset == offset ? prefetchedHeader.buf : null;

      int nextBlockOnDiskSize = 0;
      byte[] onDiskBlock = null;

      HFileBlock b = null;
      if (onDiskSizeWithHeader > 0) {
        // We know the total on-disk size. Read the entire block into memory
        // first, then parse the header. This code path is used when doing a
        // random read relying on the block index, as well as when the caller
        // learned the on-disk size by peeking into this block's header while
        // reading the previous block. This is the faster, preferable case.

        // Size to skip in case we have already read the header.
        int preReadHeaderSize = headerBuf == null ? 0 : hdrSize;
        // Room for this block plus the next block's header.
        onDiskBlock = new byte[onDiskSizeWithHeader + hdrSize];

        nextBlockOnDiskSize = readAtOffset(is, onDiskBlock,
            preReadHeaderSize, onDiskSizeWithHeader - preReadHeaderSize,
            true, offset + preReadHeaderSize, pread);
        if (headerBuf != null) {
          // The header was read while reading the previous block; copy it
          // into this block's buffer.
          assert headerBuf.hasArray();
          System.arraycopy(headerBuf.array(),
              headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        } else {
          headerBuf = ByteBuffer.wrap(onDiskBlock, 0, hdrSize);
        }
        // We know the total on-disk size but not the uncompressed size; parse the header.
        try {
          b = new HFileBlock(headerBuf, fileContext.isUseHBaseChecksum());
        } catch (IOException ex) {
          // Seen in load testing. Provide comprehensive debug info.
          throw new IOException("Failed to read compressed block at "
              + offset
              + ", onDiskSizeWithoutHeader="
              + onDiskSizeWithHeader
              + ", preReadHeaderSize="
              + hdrSize
              + ", header.length="
              + prefetchedHeader.header.length
              + ", header bytes: "
              + Bytes.toStringBinary(prefetchedHeader.header, 0,
                  hdrSize), ex);
        }
        // The caller specified an onDiskSizeWithHeader; validate it.
        int onDiskSizeWithoutHeader = onDiskSizeWithHeader - hdrSize;
        assert onDiskSizeWithoutHeader >= 0;
        b.validateOnDiskSizeWithoutHeader(onDiskSizeWithoutHeader);
      } else {
        // We don't know the on-disk size. Check headerBuf to see if we have
        // already read this block's header while reading the previous block;
        // if not, we have to do a separate read just for the header.
        if (headerBuf == null) {
          // From the header, determine the on-disk size of the block, then
          // read the rest of it, incurring two read operations. This can
          // happen when we are doing the first read in a series of reads or a
          // random read without access to the block index. This is costly and
          // should happen very rarely.
          headerBuf = ByteBuffer.allocate(hdrSize);
          readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(),
              hdrSize, false, offset, pread);
        }
        b = new HFileBlock(headerBuf, fileContext.isUseHBaseChecksum());
        onDiskBlock = new byte[b.getOnDiskSizeWithHeader() + hdrSize];
        // headerBuf is heap-backed here.
        System.arraycopy(headerBuf.array(), headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        nextBlockOnDiskSize =
            readAtOffset(is, onDiskBlock, hdrSize, b.getOnDiskSizeWithHeader()
                - hdrSize, true, offset + hdrSize, pread);
        onDiskSizeWithHeader = b.onDiskSizeWithoutHeader + hdrSize;
      }

      if (!fileContext.isCompressedOrEncrypted()) {
        b.assumeUncompressed();
      }

      if (verifyChecksum && !validateBlockChecksum(b, onDiskBlock, hdrSize)) {
        return null; // checksum mismatch
      }

      // onDiskBlock becomes this block's header-and-data buffer. If
      // nextBlockOnDiskSize is known, onDiskBlock already contains the next
      // block's header past the end of this block's data.
      b = new HFileBlock(ByteBuffer.wrap(onDiskBlock, 0, onDiskSizeWithHeader),
          this.fileContext.isUseHBaseChecksum());

      b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSize;

      // Remember the prefetched header for the next read at the next offset.
      if (b.hasNextBlockHeader()) {
        prefetchedHeader.offset = offset + b.getOnDiskSizeWithHeader();
        System.arraycopy(onDiskBlock, onDiskSizeWithHeader, prefetchedHeader.header, 0, hdrSize);
      }

      b.offset = offset;
      b.fileContext.setIncludesTags(this.fileContext.isIncludesTags());
      b.fileContext.setIncludesMvcc(this.fileContext.isIncludesMvcc());
      return b;
    }

    void setIncludesMemstoreTS(boolean includesMemstoreTS) {
      this.fileContext.setIncludesMvcc(includesMemstoreTS);
    }

    void setDataBlockEncoder(HFileDataBlockEncoder encoder) {
      encodedBlockDecodingCtx = encoder.newDataBlockDecodingContext(this.fileContext);
    }

    @Override
    public HFileBlockDecodingContext getBlockDecodingContext() {
      return this.encodedBlockDecodingCtx;
    }

    @Override
    public HFileBlockDecodingContext getDefaultBlockDecodingContext() {
      return this.defaultDecodingCtx;
    }

    /**
     * Generates the checksum for the header and data, then validates it
     * against the value stored in the header. Returns false on a checksum
     * mismatch, true otherwise.
     */
    protected boolean validateBlockChecksum(HFileBlock block, byte[] data, int hdrSize)
        throws IOException {
      return ChecksumUtil.validateBlockChecksum(path, block, data, hdrSize);
    }

    @Override
    public void closeStreams() throws IOException {
      streamWrapper.close();
    }

    @Override
    public String toString() {
      return "hfs=" + hfs + ", path=" + path + ", fileContext=" + fileContext;
    }
  }

  @Override
  public int getSerializedLength() {
    if (buf != null) {
      // Include extra bytes for the next block's header if it is still in the buffer.
      int extraSpace = hasNextBlockHeader() ? headerSize() : 0;
      return this.buf.limit() + extraSpace + HFileBlock.EXTRA_SERIALIZATION_SPACE;
    }
    return 0;
  }

  @Override
  public void serialize(ByteBuffer destination) {
    ByteBufferUtils.copyFromBufferToBuffer(destination, this.buf, 0, getSerializedLength()
        - EXTRA_SERIALIZATION_SPACE);
    serializeExtraInfo(destination);
  }

  /** Writes the checksum flag, file offset and next block's size after the block bytes. */
  public void serializeExtraInfo(ByteBuffer destination) {
    destination.put(this.fileContext.isUseHBaseChecksum() ? (byte) 1 : (byte) 0);
    destination.putLong(this.offset);
    destination.putInt(this.nextBlockOnDiskSizeWithHeader);
    destination.rewind();
  }

  @Override
  public CacheableDeserializer<Cacheable> getDeserializer() {
    return HFileBlock.blockDeserializer;
  }

  @Override
  public boolean equals(Object comparison) {
    if (this == comparison) {
      return true;
    }
    if (comparison == null) {
      return false;
    }
    if (comparison.getClass() != this.getClass()) {
      return false;
    }

    HFileBlock castedComparison = (HFileBlock) comparison;

    if (castedComparison.blockType != this.blockType) {
      return false;
    }
    if (castedComparison.nextBlockOnDiskSizeWithHeader != this.nextBlockOnDiskSizeWithHeader) {
      return false;
    }
    if (castedComparison.offset != this.offset) {
      return false;
    }
    if (castedComparison.onDiskSizeWithoutHeader != this.onDiskSizeWithoutHeader) {
      return false;
    }
    if (castedComparison.prevBlockOffset != this.prevBlockOffset) {
      return false;
    }
    if (castedComparison.uncompressedSizeWithoutHeader != this.uncompressedSizeWithoutHeader) {
      return false;
    }
    if (ByteBufferUtils.compareTo(this.buf, 0, this.buf.limit(), castedComparison.buf, 0,
        castedComparison.buf.limit()) != 0) {
      return false;
    }
    return true;
  }

  public DataBlockEncoding getDataBlockEncoding() {
    if (blockType == BlockType.ENCODED_DATA) {
      return DataBlockEncoding.getEncodingById(getDataBlockEncodingId());
    }
    return DataBlockEncoding.NONE;
  }

  byte getChecksumType() {
    return this.fileContext.getChecksumType().getCode();
  }

  int getBytesPerChecksum() {
    return this.fileContext.getBytesPerChecksum();
  }

  /** @return the size of data on disk plus header, excluding checksums */
  int getOnDiskDataSizeWithHeader() {
    return this.onDiskDataSizeWithHeader;
  }
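
  /**
   * Calculates the number of bytes that checksums occupy in this block's
   * on-disk representation. Returns 0 for blocks without checksum data, e.g.
   * blocks that came from the block cache with checksums already stripped.
   */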
  int totalChecksumBytes() {
    // If HBase checksums are not in use, there is no checksum data to
    // account for. Similarly, a zero value in bytesPerChecksum indicates a
    // cached block whose checksums were already validated when the block was
    // read from disk.
    if (!fileContext.isUseHBaseChecksum() || this.fileContext.getBytesPerChecksum() == 0) {
      return 0;
    }
    return (int) ChecksumUtil.numBytes(onDiskDataSizeWithHeader,
        this.fileContext.getBytesPerChecksum());
  }

  /** Returns the size of this block's header. */
  public int headerSize() {
    return headerSize(this.fileContext.isUseHBaseChecksum());
  }

  /** Maps the checksum flag to the size of the header. */
  public static int headerSize(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_HEADER_SIZE;
    }
    return HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
  }

  /** Return the appropriate dummy header for this block's checksum flag. */
  public byte[] getDummyHeaderForVersion() {
    return getDummyHeaderForVersion(this.fileContext.isUseHBaseChecksum());
  }

  /** Return the appropriate dummy header for the given checksum flag. */
  private static byte[] getDummyHeaderForVersion(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_DUMMY_HEADER;
    }
    return DUMMY_HEADER_NO_CHECKSUM;
  }

  /**
   * @return the HFileContext used to create this HFileBlock. Not necessarily
   *         the fileContext of the file from which this block's data was
   *         originally read.
   */
  public HFileContext getHFileContext() {
    return this.fileContext;
  }

  /**
   * Converts the contents of the block header into a human-readable string.
   * Mostly helpful for debugging. Assumes that the header contains the HBase
   * checksum fields.
   */
  static String toStringHeader(ByteBuffer buf) throws IOException {
    byte[] magicBuf = new byte[Math.min(buf.limit() - buf.position(), BlockType.MAGIC_LENGTH)];
    buf.get(magicBuf);
    BlockType bt = BlockType.parse(magicBuf, 0, BlockType.MAGIC_LENGTH);
    int compressedBlockSizeNoHeader = buf.getInt();
    int uncompressedBlockSizeNoHeader = buf.getInt();
    long prevBlockOffset = buf.getLong();
    byte cksumtype = buf.get();
    long bytesPerChecksum = buf.getInt();
    long onDiskDataSizeWithHeader = buf.getInt();
    return " Header dump: magic: " + Bytes.toString(magicBuf) +
        " blockType " + bt +
        " compressedBlockSizeNoHeader " +
        compressedBlockSizeNoHeader +
        " uncompressedBlockSizeNoHeader " +
        uncompressedBlockSizeNoHeader +
        " prevBlockOffset " + prevBlockOffset +
        " checksumType " + ChecksumType.codeToType(cksumtype) +
        " bytesPerChecksum " + bytesPerChecksum +
        " onDiskDataSizeWithHeader " + onDiskDataSizeWithHeader;
  }
}