/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.fs.HFileSystem;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultEncodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockEncodingContext;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CompoundBloomFilter;
import org.apache.hadoop.io.IOUtils;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
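/**
 * Reads and writes version 2 {@link HFile} blocks. A version 2 block is structured as follows
 * (mirroring {@link #overwriteHeader()} and the {@link #HFileBlock(ByteBuffer, boolean)}
 * constructor):
 * <ul>
 * <li>Header:
 * <ul>
 * <li>Magic record identifying the block type (8 bytes)</li>
 * <li>Compressed block size, excluding header, including checksum (4 bytes)</li>
 * <li>Uncompressed block size, excluding header, excluding checksum (4 bytes)</li>
 * <li>The offset of the previous block of the same type (8 bytes), used to navigate to the
 * previous block without going through the block index</li>
 * <li>When HBase checksums are in use: the ordinal describing the checksum type (1 byte), the
 * number of data bytes per checksum chunk (4 bytes), and the on-disk size of header plus data,
 * excluding checksums (4 bytes)</li>
 * </ul>
 * </li>
 * <li>Raw/compressed/encrypted/encoded data, using the same compression algorithm for all
 * blocks in the {@link HFile}</li>
 * <li>When HBase checksums are in use: a series of 4-byte checksum values, one per
 * bytesPerChecksum bytes of the block</li>
 * </ul>
 * The representation in the block cache is the same, except that the data section is always
 * uncompressed in the cache.
 */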
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {

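  /**
   * On a checksum failure, this many succeeding read requests switch back to using HDFS
   * checksums before auto-reenabling HBase checksum verification (see
   * {@link FSDataInputStreamWrapper#fallbackToFsChecksum(int)}).
   */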
  static final int CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD = 3;

  /** Values for the fill-header flag of the main constructor, for readability at call sites. */
  public static final boolean FILL_HEADER = true;
  public static final boolean DONT_FILL_HEADER = false;

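  /**
   * The size of the block header when the block type is {@link BlockType#ENCODED_DATA}. This
   * extends the normal header by the two-byte id of the data block encoder.
   */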
  public static final int ENCODED_HEADER_SIZE = HConstants.HFILEBLOCK_HEADER_SIZE
      + DataBlockEncoding.ID_SIZE;

  /** An all-zero dummy header for blocks written without HBase checksum support. */
  static final byte[] DUMMY_HEADER_NO_CHECKSUM =
      new byte[HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM];

  /** Estimated heap overhead of an empty ByteBuffer, used by {@link #heapSize()}. */
  public static final int BYTE_BUFFER_HEAP_SIZE = (int) ClassSize.estimateBase(
      ByteBuffer.wrap(new byte[0], 0, 0).getClass(), false);

  /**
   * Space reserved at the end of the serialized block for extra cache metadata: a flag byte
   * for HBase checksum support, the block's file offset (long), and the next block's on-disk
   * size (int). See {@link #serializeExtraInfo(ByteBuffer)}.
   */
  public static final int EXTRA_SERIALIZATION_SPACE = Bytes.SIZEOF_BYTE + Bytes.SIZEOF_INT
      + Bytes.SIZEOF_LONG;

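  /** Each checksum value is an int; this is its size in bytes. */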
  static final int CHECKSUM_SIZE = Bytes.SIZEOF_INT;

  /**
   * Deserializer used by the block cache: reconstructs an HFileBlock from the serialized form
   * produced by {@link #serialize(ByteBuffer)}.
   */
  private static final CacheableDeserializer<Cacheable> blockDeserializer =
      new CacheableDeserializer<Cacheable>() {
        public HFileBlock deserialize(ByteBuffer buf, boolean reuse) throws IOException {
          buf.limit(buf.limit() - HFileBlock.EXTRA_SERIALIZATION_SPACE).rewind();
          ByteBuffer newByteBuffer;
          if (reuse) {
            newByteBuffer = buf.slice();
          } else {
            newByteBuffer = ByteBuffer.allocate(buf.limit());
            newByteBuffer.put(buf);
          }
          buf.position(buf.limit());
          buf.limit(buf.limit() + HFileBlock.EXTRA_SERIALIZATION_SPACE);
          boolean usesChecksum = buf.get() == (byte) 1;
          HFileBlock ourBuffer = new HFileBlock(newByteBuffer, usesChecksum);
          ourBuffer.offset = buf.getLong();
          ourBuffer.nextBlockOnDiskSizeWithHeader = buf.getInt();
          if (ourBuffer.hasNextBlockHeader()) {
            ourBuffer.buf.limit(ourBuffer.buf.limit() - ourBuffer.headerSize());
          }
          return ourBuffer;
        }

        @Override
        public int getDeserialiserIdentifier() {
          return deserializerIdentifier;
        }

        @Override
        public HFileBlock deserialize(ByteBuffer b) throws IOException {
          return deserialize(b, false);
        }
      };

  private static final int deserializerIdentifier;
  static {
    deserializerIdentifier = CacheableDeserializerIdManager
        .registerDeserializer(blockDeserializer);
  }

  /** Type of block (data, index, Bloom chunk, etc.); first in the header. */
  private BlockType blockType;

  /** Size on disk excluding header, including checksum (4 bytes in the header). */
  private int onDiskSizeWithoutHeader;

  /** Size of pure data, excluding header and checksum (4 bytes in the header). */
  private final int uncompressedSizeWithoutHeader;

  /** The offset of the previous block of the same type (8 bytes in the header). */
  private final long prevBlockOffset;

  /**
   * Size on disk of header plus data, excluding checksum bytes (4 bytes in the header when
   * HBase checksums are in use).
   */
  private final int onDiskDataSizeWithHeader;

  /** The in-memory representation of the hfile block: header, data, then checksums. */
  private ByteBuffer buf;

  /** Metadata about the file this block belongs to (compression, checksums, encoding, ...). */
  private HFileContext fileContext;

  /**
   * The offset of this block in the file. Populated by the reader for convenience of access;
   * not part of the block header.
   */
  private long offset = -1;

  /**
   * The on-disk size of the next block, including the header, obtained by peeking into the
   * next block's header while reading this block, or -1 if unknown.
   */
  private int nextBlockOnDiskSizeWithHeader = -1;
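  /**
   * Creates a new {@link HFile} block from the given fields. This constructor is mostly used
   * when the block data has already been read and uncompressed, e.g. by
   * {@link Writer#getBlockForCaching(CacheConfig)}.
   *
   * @param blockType the type of this block, see {@link BlockType}
   * @param onDiskSizeWithoutHeader see {@link #onDiskSizeWithoutHeader}
   * @param uncompressedSizeWithoutHeader see {@link #uncompressedSizeWithoutHeader}
   * @param prevBlockOffset see {@link #prevBlockOffset}
   * @param buf block header ({@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes) followed by
   *          uncompressed data
   * @param fillHeader when true, overwrite the header in {@code buf} from the passed field
   *          values ({@link #FILL_HEADER} / {@link #DONT_FILL_HEADER})
   * @param offset the file offset the block was read from
   * @param onDiskDataSizeWithHeader see {@link #onDiskDataSizeWithHeader}
   * @param fileContext HFile meta data
   */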
  HFileBlock(BlockType blockType, int onDiskSizeWithoutHeader, int uncompressedSizeWithoutHeader,
      long prevBlockOffset, ByteBuffer buf, boolean fillHeader, long offset,
      int onDiskDataSizeWithHeader, HFileContext fileContext) {
    this.blockType = blockType;
    this.onDiskSizeWithoutHeader = onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = uncompressedSizeWithoutHeader;
    this.prevBlockOffset = prevBlockOffset;
    this.buf = buf;
    this.offset = offset;
    this.onDiskDataSizeWithHeader = onDiskDataSizeWithHeader;
    this.fileContext = fileContext;
    if (fillHeader) {
      overwriteHeader();
    }
    this.buf.rewind();
  }

  /** Copy constructor; duplicates {@code that}'s buffer so contents are shared. */
  HFileBlock(HFileBlock that) {
    this.blockType = that.blockType;
    this.onDiskSizeWithoutHeader = that.onDiskSizeWithoutHeader;
    this.uncompressedSizeWithoutHeader = that.uncompressedSizeWithoutHeader;
    this.prevBlockOffset = that.prevBlockOffset;
    this.buf = that.buf.duplicate();
    this.offset = that.offset;
    this.onDiskDataSizeWithHeader = that.onDiskDataSizeWithHeader;
    this.fileContext = that.fileContext;
    this.nextBlockOnDiskSizeWithHeader = that.nextBlockOnDiskSizeWithHeader;
  }
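  /**
   * Creates a block from an existing buffer starting with a header. Rewinds and takes
   * ownership of the buffer. By definition of rewind, ignores the buffer position, but if you
   * slice the buffer beforehand, it will rewind to that point.
   *
   * @param b buffer holding the block header followed by data
   * @param usesHBaseChecksum whether the header contains the HBase checksum fields
   */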
  HFileBlock(ByteBuffer b, boolean usesHBaseChecksum) throws IOException {
    b.rewind();
    blockType = BlockType.read(b);
    onDiskSizeWithoutHeader = b.getInt();
    uncompressedSizeWithoutHeader = b.getInt();
    prevBlockOffset = b.getLong();
    HFileContextBuilder contextBuilder = new HFileContextBuilder();
    contextBuilder.withHBaseCheckSum(usesHBaseChecksum);
    if (usesHBaseChecksum) {
      contextBuilder.withChecksumType(ChecksumType.codeToType(b.get()));
      contextBuilder.withBytesPerCheckSum(b.getInt());
      this.onDiskDataSizeWithHeader = b.getInt();
    } else {
      contextBuilder.withChecksumType(ChecksumType.NULL);
      contextBuilder.withBytesPerCheckSum(0);
      this.onDiskDataSizeWithHeader = onDiskSizeWithoutHeader +
          HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
    }
    this.fileContext = contextBuilder.build();
    buf = b;
    buf.rewind();
  }

  public BlockType getBlockType() {
    return blockType;
  }

  /**
   * @return the data block encoding id used to encode this block; valid only for blocks of
   *         type {@link BlockType#ENCODED_DATA}
   */
  public short getDataBlockEncodingId() {
    if (blockType != BlockType.ENCODED_DATA) {
      throw new IllegalArgumentException("Querying encoder ID of a block " +
          "of type other than " + BlockType.ENCODED_DATA + ": " + blockType);
    }
    return buf.getShort(headerSize());
  }

  /**
   * @return the on-disk size of the block with header size included. This includes the
   *         header, the data and the checksum data.
   */
  public int getOnDiskSizeWithHeader() {
    return onDiskSizeWithoutHeader + headerSize();
  }

  /**
   * Returns the size of the compressed part of the block in case compression is used, and
   * the size of the uncompressed part otherwise. The header is excluded; checksum data is
   * included.
   */
  public int getOnDiskSizeWithoutHeader() {
    return onDiskSizeWithoutHeader;
  }

  /**
   * @return the uncompressed size of the data part of the block, header and checksum excluded
   */
  public int getUncompressedSizeWithoutHeader() {
    return uncompressedSizeWithoutHeader;
  }

  /**
   * @return the offset of the previous block of the same type in the file, or -1 if unknown
   */
  public long getPrevBlockOffset() {
    return prevBlockOffset;
  }

  /**
   * Rewinds {@code buf} and writes the header fields from the field values, overwriting any
   * dummy header written at block-encoding time. Leaves the buffer position at the end of
   * the header as a side effect.
   */
  private void overwriteHeader() {
    buf.rewind();
    blockType.write(buf);
    buf.putInt(onDiskSizeWithoutHeader);
    buf.putInt(uncompressedSizeWithoutHeader);
    buf.putLong(prevBlockOffset);
    if (this.fileContext.isUseHBaseChecksum()) {
      buf.put(fileContext.getChecksumType().getCode());
      buf.putInt(fileContext.getBytesPerChecksum());
      buf.putInt(onDiskDataSizeWithHeader);
    }
  }
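  /**
   * Returns a buffer that does not include the header or checksum.
   *
   * @return the buffer with header skipped and checksum omitted
   */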
  public ByteBuffer getBufferWithoutHeader() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset() + headerSize(),
        buf.limit() - headerSize() - totalChecksumBytes()).slice();
  }

  /**
   * Returns the buffer this block stores internally. The clients must not modify the buffer
   * object. This method has to be public because it is used in {@link CompoundBloomFilter}
   * to avoid object creation on every Bloom filter lookup, but has to be used with caution.
   * Checksum data is not included in the returned buffer but header data is.
   *
   * @return the buffer of this block for read-only operations
   */
  public ByteBuffer getBufferReadOnly() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset(),
        buf.limit() - totalChecksumBytes()).slice();
  }

  /**
   * Returns the buffer of this block, including header and checksum data. The clients must
   * not modify the buffer object. This method has to be public because it is used in
   * {@link BucketCache} to avoid a buffer copy.
   *
   * @return the buffer with header and checksum included, for read-only operations
   */
  public ByteBuffer getBufferReadOnlyWithHeader() {
    return ByteBuffer.wrap(buf.array(), buf.arrayOffset(), buf.limit()).slice();
  }

  /**
   * Returns a byte buffer of this block, including header and checksum, positioned at the
   * beginning of the header. The underlying data array is not copied.
   *
   * @return the byte buffer with header and checksum included
   */
  ByteBuffer getBufferWithHeader() {
    ByteBuffer dupBuf = buf.duplicate();
    dupBuf.rewind();
    return dupBuf;
  }

  private void sanityCheckAssertion(long valueFromBuf, long valueFromField,
      String fieldName) throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new AssertionError(fieldName + " in the buffer (" + valueFromBuf
          + ") is different from that in the field (" + valueFromField + ")");
    }
  }

  private void sanityCheckAssertion(BlockType valueFromBuf, BlockType valueFromField)
      throws IOException {
    if (valueFromBuf != valueFromField) {
      throw new IOException("Block type stored in the buffer: " +
          valueFromBuf + ", block type field: " + valueFromField);
    }
  }

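  /**
   * Checks if the block is internally consistent, i.e. the first
   * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes of the buffer contain a valid header
   * consistent with the fields. This function is primarily for testing and debugging, and
   * is not thread-safe, because it alters the internal buffer pointer.
   */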
  void sanityCheck() throws IOException {
    buf.rewind();

    sanityCheckAssertion(BlockType.read(buf), blockType);

    sanityCheckAssertion(buf.getInt(), onDiskSizeWithoutHeader,
        "onDiskSizeWithoutHeader");

    sanityCheckAssertion(buf.getInt(), uncompressedSizeWithoutHeader,
        "uncompressedSizeWithoutHeader");

    sanityCheckAssertion(buf.getLong(), prevBlockOffset, "prevBlockOffset");
    if (this.fileContext.isUseHBaseChecksum()) {
      sanityCheckAssertion(buf.get(), this.fileContext.getChecksumType().getCode(),
          "checksumType");
      sanityCheckAssertion(buf.getInt(), this.fileContext.getBytesPerChecksum(),
          "bytesPerChecksum");
      sanityCheckAssertion(buf.getInt(), onDiskDataSizeWithHeader, "onDiskDataSizeWithHeader");
    }

    int cksumBytes = totalChecksumBytes();
    int expectedBufLimit = onDiskDataSizeWithHeader + cksumBytes;
    if (buf.limit() != expectedBufLimit) {
      throw new AssertionError("Expected buffer limit " + expectedBufLimit
          + ", got " + buf.limit());
    }

    // We might optionally allocate HFILEBLOCK_HEADER_SIZE more bytes to read the next
    // block's header, so there are two sensible values for buffer capacity.
    int hdrSize = headerSize();
    if (buf.capacity() != expectedBufLimit &&
        buf.capacity() != expectedBufLimit + hdrSize) {
      throw new AssertionError("Invalid buffer capacity: " + buf.capacity() +
          ", expected " + expectedBufLimit + " or " + (expectedBufLimit + hdrSize));
    }
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder()
        .append("HFileBlock [")
        .append(" fileOffset=").append(offset)
        .append(" headerSize()=").append(headerSize())
        .append(" blockType=").append(blockType)
        .append(" onDiskSizeWithoutHeader=").append(onDiskSizeWithoutHeader)
        .append(" uncompressedSizeWithoutHeader=").append(uncompressedSizeWithoutHeader)
        .append(" prevBlockOffset=").append(prevBlockOffset)
        .append(" isUseHBaseChecksum()=").append(fileContext.isUseHBaseChecksum());
    if (fileContext.isUseHBaseChecksum()) {
      sb.append(" checksumType=").append(ChecksumType.codeToType(this.buf.get(24)))
          .append(" bytesPerChecksum=").append(this.buf.getInt(24 + 1))
          .append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader);
    } else {
      sb.append(" onDiskDataSizeWithHeader=").append(onDiskDataSizeWithHeader)
          .append("(").append(onDiskSizeWithoutHeader)
          .append("+").append(HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM).append(")");
    }
    sb.append(" getOnDiskSizeWithHeader()=").append(getOnDiskSizeWithHeader())
        .append(" totalChecksumBytes()=").append(totalChecksumBytes())
        .append(" isUnpacked()=").append(isUnpacked())
        .append(" buf=[ ")
        .append(buf)
        .append(", array().length=").append(buf.array().length)
        .append(", arrayOffset()=").append(buf.arrayOffset())
        .append(" ]")
        .append(" dataBeginsWith=")
        .append(Bytes.toStringBinary(buf.array(), buf.arrayOffset() + headerSize(),
            Math.min(32, buf.limit() - buf.arrayOffset() - headerSize())))
        .append(" fileContext=").append(fileContext)
        .append(" ]");
    return sb.toString();
  }

  /**
   * Called after reading a block with the provided on-disk size to verify that the size
   * recorded in the block header matches.
   */
  private void validateOnDiskSizeWithoutHeader(
      int expectedOnDiskSizeWithoutHeader) throws IOException {
    if (onDiskSizeWithoutHeader != expectedOnDiskSizeWithoutHeader) {
      String blockInfoMsg =
          "Block offset: " + offset + ", data starts with: "
              + Bytes.toStringBinary(buf.array(), buf.arrayOffset(),
                  buf.arrayOffset() + Math.min(32, buf.limit()));
      throw new IOException("On-disk size without header provided is "
          + expectedOnDiskSizeWithoutHeader + ", but block "
          + "header contains " + onDiskSizeWithoutHeader + ". " +
          blockInfoMsg);
    }
  }
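  /**
   * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its
   * encoded structure. The returned block is a separate instance backed by a newly allocated
   * buffer; the packed block is left untouched, so it can still be cached as-is.
   */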
  HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {
    if (!fileContext.isCompressedOrEncrypted()) {
      // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),
      // which is used for block serialization to L2 cache, does not preserve encoding and
      // encryption details.
      return this;
    }

    HFileBlock unpacked = new HFileBlock(this);
    unpacked.allocateBuffer(); // allocates space for the decompressed block

    HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?
        reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();
    ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),
        unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),
        this.getBufferReadOnlyWithHeader().array(), this.headerSize());

    // Preserve the next block's header bytes in the new block if we have them.
    if (unpacked.hasNextBlockHeader()) {
      System.arraycopy(this.buf.array(), this.buf.arrayOffset() + this.onDiskDataSizeWithHeader,
          unpacked.buf.array(), unpacked.buf.arrayOffset() + unpacked.headerSize() +
          unpacked.uncompressedSizeWithoutHeader + unpacked.totalChecksumBytes(),
          unpacked.headerSize());
    }
    return unpacked;
  }

  /** Return true when this buffer includes the next block's header. */
  private boolean hasNextBlockHeader() {
    return nextBlockOnDiskSizeWithHeader > 0;
  }

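  /**
   * Always allocates a new buffer of the correct size. Copies header bytes from the existing
   * buffer. Does not change header fields. Reserves room to keep the next block's header if
   * it is present.
   */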
  private void allocateBuffer() {
    int cksumBytes = totalChecksumBytes();
    int headerSize = headerSize();
    int capacityNeeded = headerSize + uncompressedSizeWithoutHeader +
        cksumBytes + (hasNextBlockHeader() ? headerSize : 0);

    ByteBuffer newBuf = ByteBuffer.allocate(capacityNeeded);

    // Copy header bytes into newBuf.
    System.arraycopy(buf.array(), buf.arrayOffset(), newBuf.array(),
        newBuf.arrayOffset(), headerSize);

    buf = newBuf;
    // Set limit to exclude the next block's header; it still fits within the capacity.
    buf.limit(headerSize + uncompressedSizeWithoutHeader + cksumBytes);
  }

  /**
   * Return true when this block's buffer has been unpacked, false otherwise. Note this is a
   * calculated heuristic, not a tracked attribute of the block.
   */
  public boolean isUnpacked() {
    final int cksumBytes = totalChecksumBytes();
    final int headerSize = headerSize();
    final int expectedCapacity = headerSize + uncompressedSizeWithoutHeader + cksumBytes;
    final int bufCapacity = buf.capacity();
    return bufCapacity == expectedCapacity || bufCapacity == expectedCapacity + headerSize;
  }

  /** An additional sanity-check in case no compression or encryption is being used. */
  public void assumeUncompressed() throws IOException {
    if (onDiskSizeWithoutHeader != uncompressedSizeWithoutHeader +
        totalChecksumBytes()) {
      throw new IOException("Using no compression but "
          + "onDiskSizeWithoutHeader=" + onDiskSizeWithoutHeader + ", "
          + "uncompressedSizeWithoutHeader=" + uncompressedSizeWithoutHeader
          + ", numChecksumbytes=" + totalChecksumBytes());
    }
  }

  /**
   * @param expectedType the expected type of this block
   * @throws IOException if this block's type is different than expected
   */
  public void expectType(BlockType expectedType) throws IOException {
    if (blockType != expectedType) {
      throw new IOException("Invalid block type: expected=" + expectedType
          + ", actual=" + blockType);
    }
  }

  /** @return the offset of this block in the file it was read from */
  public long getOffset() {
    if (offset < 0) {
      throw new IllegalStateException(
          "HFile block offset not initialized properly");
    }
    return offset;
  }

  /**
   * @return a byte stream reading the data and checksum of this block
   */
  public DataInputStream getByteStream() {
    return new DataInputStream(new ByteArrayInputStream(buf.array(),
        buf.arrayOffset() + headerSize(), buf.limit() - headerSize()));
  }

  @Override
  public long heapSize() {
    long size = ClassSize.align(
        ClassSize.OBJECT +
        // Block type, byte buffer and meta references
        3 * ClassSize.REFERENCE +
        // On-disk size, uncompressed size, next block's on-disk size, onDiskDataSize
        4 * Bytes.SIZEOF_INT +
        // This and previous block offset
        2 * Bytes.SIZEOF_LONG +
        // Heap size of the meta object. meta will be always not null.
        fileContext.heapSize()
    );

    if (buf != null) {
      // Deep overhead of the byte buffer. Needs to be aligned separately.
      size += ClassSize.align(buf.capacity() + BYTE_BUFFER_HEAP_SIZE);
    }

    return ClassSize.align(size);
  }
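  /**
   * Read from an input stream. Analogous to
   * {@link IOUtils#readFully(InputStream, byte[], int, int)}, but specifies a number of
   * "extra" bytes to also optionally read.
   *
   * @param in the input stream to read from
   * @param buf the buffer to read into
   * @param bufOffset the destination offset in the buffer
   * @param necessaryLen the number of bytes that are absolutely necessary to read
   * @param extraLen the number of extra bytes that would be nice to read
   * @return true if succeeded reading the extra bytes
   * @throws IOException if failed to read the necessary bytes
   */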
  public static boolean readWithExtra(InputStream in, byte[] buf,
      int bufOffset, int necessaryLen, int extraLen) throws IOException {
    int bytesRemaining = necessaryLen + extraLen;
    while (bytesRemaining > 0) {
      int ret = in.read(buf, bufOffset, bytesRemaining);
      if (ret == -1 && bytesRemaining <= extraLen) {
        // We could not read the "extra data", but that is OK.
        break;
      }

      if (ret < 0) {
        throw new IOException("Premature EOF from inputStream (read "
            + "returned " + ret + ", was trying to read " + necessaryLen
            + " necessary bytes and " + extraLen + " extra bytes, "
            + "successfully read "
            + (necessaryLen + extraLen - bytesRemaining) + ")");
      }
      bufOffset += ret;
      bytesRemaining -= ret;
    }
    return bytesRemaining <= 0;
  }
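  /**
   * Positional read with extra. Does not assume that the stream is positioned at the start
   * of the block; the given file offset is used instead. Reads at least the necessary number
   * of bytes, and optionally some extra, via multiple positional reads.
   *
   * @param in the input stream to read from
   * @param position the position within the stream from which to start reading
   * @param buf the buffer to read into
   * @param bufOffset the destination offset in the buffer
   * @param necessaryLen the number of bytes that are absolutely necessary to read
   * @param extraLen the number of extra bytes that would be nice to read
   * @return true if and only if extraLen is &gt; 0 and reading those extra bytes was
   *         successful
   * @throws IOException if failed to read the necessary bytes
   */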
  @VisibleForTesting
  static boolean positionalReadWithExtra(FSDataInputStream in,
      long position, byte[] buf, int bufOffset, int necessaryLen, int extraLen)
      throws IOException {
    int bytesRemaining = necessaryLen + extraLen;
    int bytesRead = 0;
    while (bytesRead < necessaryLen) {
      int ret = in.read(position, buf, bufOffset, bytesRemaining);
      if (ret < 0) {
        throw new IOException("Premature EOF from inputStream (positional read "
            + "returned " + ret + ", was trying to read " + necessaryLen
            + " necessary bytes and " + extraLen + " extra bytes, "
            + "successfully read " + bytesRead + ")");
      }
      position += ret;
      bufOffset += ret;
      bytesRemaining -= ret;
      bytesRead += ret;
    }
    return bytesRead != necessaryLen && bytesRemaining <= 0;
  }

  /**
   * @return the on-disk size of the next block (including the header size) read by peeking
   *         into the next block's header, or -1 if it could not be determined
   */
  public int getNextBlockOnDiskSizeWithHeader() {
    return nextBlockOnDiskSizeWithHeader;
  }
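  /**
   * Unified version 2 {@link HFile} block writer. The intended usage pattern is as follows:
   * <ol>
   * <li>Construct an {@link HFileBlock.Writer}, providing a data block encoder and file
   * context.</li>
   * <li>Call {@link Writer#startWriting} and get a data stream to write to.</li>
   * <li>Write your data into the stream.</li>
   * <li>Call {@link Writer#writeHeaderAndData(FSDataOutputStream)} as many times as you need
   * to, storing the serialized block into an external stream.</li>
   * <li>Repeat to write more blocks.</li>
   * </ol>
   */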
  public static class Writer {

    private enum State {
      INIT,
      WRITING,
      BLOCK_READY
    };

    /** Writer state. Used to determine if a block is ready to write. */
    private State state = State.INIT;

    /** Data block encoder used for data blocks. */
    private final HFileDataBlockEncoder dataBlockEncoder;

    /** Block encoding context for data blocks. */
    private HFileBlockEncodingContext dataBlockEncodingCtx;

    /** Block encoding context for non-data blocks. */
    private HFileBlockDefaultEncodingContext defaultBlockEncodingCtx;

    /**
     * The stream we use to accumulate data in uncompressed format for each block. We reset
     * this stream at the end of each block and reuse it. A dummy header, the first
     * {@link HConstants#HFILEBLOCK_HEADER_SIZE} bytes, is written into this stream first.
     */
    private ByteArrayOutputStream baosInMemory;

    /**
     * Current block type. Set in {@link #startWriting(BlockType)}. Could be changed in
     * {@link #encodeDataBlockForDisk()} from {@link BlockType#DATA} to
     * {@link BlockType#ENCODED_DATA}.
     */
    private BlockType blockType;

    /**
     * The stream the user writes block payload into, backed by {@link #baosInMemory}. Valid
     * in the "writing" state.
     */
    private DataOutputStream userDataStream;

    /**
     * Bytes to be written to the file system, including the header. Compressed if
     * compression is turned on. Does not include checksum data, which is kept separately in
     * {@link #onDiskChecksum}.
     */
    private byte[] onDiskBytesWithHeader;

    /**
     * Checksum values for the block, to be written out immediately after
     * {@link #onDiskBytesWithHeader}.
     */
    private byte[] onDiskChecksum;

    /**
     * Valid in the "block ready" state. Contains the header and the uncompressed (but
     * potentially encoded, if this is a data block) bytes, so the length is
     * {@link #getUncompressedSizeWithoutHeader()} +
     * {@link HConstants#HFILEBLOCK_HEADER_SIZE}. Does not store checksums.
     */
    private byte[] uncompressedBytesWithHeader;

    /**
     * Current block's start offset in the {@link HFile}. Set in
     * {@link #writeHeaderAndData(FSDataOutputStream)}.
     */
    private long startOffset;

    /**
     * Offset of the previous block by block type. Updated when the next block is started.
     */
    private long[] prevOffsetByType;

    /** The offset of the previous block of the same type. */
    private long prevOffset;

    /** Meta data that holds information about the block being written. */
    private HFileContext fileContext;

    /**
     * @param dataBlockEncoder data block encoding algorithm to use, or null for no encoding
     * @param fileContext HFile meta data
     */
    public Writer(HFileDataBlockEncoder dataBlockEncoder, HFileContext fileContext) {
      this.dataBlockEncoder = dataBlockEncoder != null
          ? dataBlockEncoder : NoOpDataBlockEncoder.INSTANCE;
      defaultBlockEncodingCtx = new HFileBlockDefaultEncodingContext(null,
          HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);
      dataBlockEncodingCtx = this.dataBlockEncoder
          .newDataBlockEncodingContext(HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);

      if (fileContext.getBytesPerChecksum() < HConstants.HFILEBLOCK_HEADER_SIZE) {
        throw new RuntimeException("Unsupported value of bytesPerChecksum. " +
            " Minimum is " + HConstants.HFILEBLOCK_HEADER_SIZE + " but the configured value is " +
            fileContext.getBytesPerChecksum());
      }

      baosInMemory = new ByteArrayOutputStream();

      prevOffsetByType = new long[BlockType.values().length];
      for (int i = 0; i < prevOffsetByType.length; ++i) {
        prevOffsetByType[i] = -1;
      }

      this.fileContext = fileContext;
    }
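    /**
     * Starts writing into the block. The previous block's data is discarded.
     *
     * @return the stream the user can write their data into
     * @throws IOException
     */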
    public DataOutputStream startWriting(BlockType newBlockType)
        throws IOException {
      if (state == State.BLOCK_READY && startOffset != -1) {
        // We had a previous block that was written to a stream at a specific
        // offset. Save that offset as the last offset of a block of that type.
        prevOffsetByType[blockType.getId()] = startOffset;
      }

      startOffset = -1;
      blockType = newBlockType;

      baosInMemory.reset();
      baosInMemory.write(HConstants.HFILEBLOCK_DUMMY_HEADER);

      state = State.WRITING;

      // We will compress it later in finishBlock()
      userDataStream = new DataOutputStream(baosInMemory);
      return userDataStream;
    }

    /**
     * Returns the stream for the user to write to. The block writer takes care of handling
     * compression and buffering for caching on write. Can only be called in the "writing"
     * state.
     *
     * @return the data output stream for the user to write to
     */
    DataOutputStream getUserDataStream() {
      expectState(State.WRITING);
      return userDataStream;
    }

    /**
     * Transitions the block writer from the "writing" state to the "block ready" state.
     * Does nothing if a block is already finished.
     */
    private void ensureBlockReady() throws IOException {
      Preconditions.checkState(state != State.INIT,
          "Unexpected state: " + state);

      if (state == State.BLOCK_READY) {
        return;
      }

      // This will set state to BLOCK_READY.
      finishBlock();
    }
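    /**
     * An internal method that flushes the compressing stream (if using compression),
     * serializes the header, and takes care of the separate uncompressed stream for caching
     * on write, if applicable. Sets block write state to "block ready".
     */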
    private void finishBlock() throws IOException {
      userDataStream.flush();

      // This does an array copy, so it is safe to cache this byte array.
      uncompressedBytesWithHeader = baosInMemory.toByteArray();
      prevOffset = prevOffsetByType[blockType.getId()];

      // We need to set state before we can package the block up for
      // cache-on-write. In a way, the block is ready, but not yet encoded or
      // compressed.
      state = State.BLOCK_READY;
      if (blockType == BlockType.DATA) {
        encodeDataBlockForDisk();
      } else {
        defaultBlockEncodingCtx.compressAfterEncodingWithBlockType(
            uncompressedBytesWithHeader, blockType);
        onDiskBytesWithHeader =
            defaultBlockEncodingCtx.getOnDiskBytesWithHeader();
      }

      int numBytes = (int) ChecksumUtil.numBytes(
          onDiskBytesWithHeader.length,
          fileContext.getBytesPerChecksum());

      // Put the header for the on-disk bytes.
      putHeader(onDiskBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);
      // Set the header for the uncompressed bytes (for cache-on-write).
      putHeader(uncompressedBytesWithHeader, 0,
          onDiskBytesWithHeader.length + numBytes,
          uncompressedBytesWithHeader.length, onDiskBytesWithHeader.length);

      onDiskChecksum = new byte[numBytes];
      ChecksumUtil.generateChecksums(
          onDiskBytesWithHeader, 0, onDiskBytesWithHeader.length,
          onDiskChecksum, 0, fileContext.getChecksumType(), fileContext.getBytesPerChecksum());
    }

    /**
     * Encodes this block if it is a data block and encoding is turned on in
     * {@link #dataBlockEncoder}.
     */
    private void encodeDataBlockForDisk() throws IOException {
      // Do data block encoding, if data block encoder is set.
      ByteBuffer rawKeyValues =
          ByteBuffer.wrap(uncompressedBytesWithHeader, HConstants.HFILEBLOCK_HEADER_SIZE,
              uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE).slice();

      // Do the encoding.
      dataBlockEncoder.beforeWriteToDisk(rawKeyValues, dataBlockEncodingCtx, blockType);

      uncompressedBytesWithHeader =
          dataBlockEncodingCtx.getUncompressedBytesWithHeader();
      onDiskBytesWithHeader =
          dataBlockEncodingCtx.getOnDiskBytesWithHeader();
      blockType = dataBlockEncodingCtx.getBlockType();
    }

    /**
     * Put the header into the given byte array at the given offset.
     *
     * @param onDiskSize size of the block on disk: header + data + checksum
     * @param uncompressedSize size of the block after decompression (but before optional
     *          data block decoding), including header
     * @param onDiskDataSize size of the block on disk with header and data but not including
     *          the checksums
     */
    private void putHeader(byte[] dest, int offset, int onDiskSize,
        int uncompressedSize, int onDiskDataSize) {
      offset = blockType.put(dest, offset);
      offset = Bytes.putInt(dest, offset, onDiskSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putInt(dest, offset, uncompressedSize - HConstants.HFILEBLOCK_HEADER_SIZE);
      offset = Bytes.putLong(dest, offset, prevOffset);
      offset = Bytes.putByte(dest, offset, fileContext.getChecksumType().getCode());
      offset = Bytes.putInt(dest, offset, fileContext.getBytesPerChecksum());
      Bytes.putInt(dest, offset, onDiskDataSize);
    }

    /**
     * Similar to {@link #finishBlockAndWriteHeaderAndData(DataOutputStream)}, but also
     * records the offset of this block so that it can be referenced in the next block of
     * the same type. Throws if the same block is written to two different stream offsets.
     */
    public void writeHeaderAndData(FSDataOutputStream out) throws IOException {
      long offset = out.getPos();
      if (startOffset != -1 && offset != startOffset) {
        throw new IOException("A " + blockType + " block written to a "
            + "stream twice, first at offset " + startOffset + ", then at "
            + offset);
      }
      startOffset = offset;

      finishBlockAndWriteHeaderAndData((DataOutputStream) out);
    }

    /**
     * Writes the header and the compressed data of this block (or uncompressed data when
     * not using compression) into the given stream. Can be called in the "writing" state or
     * in the "block ready" state. If called in the "writing" state, transitions the writer
     * to the "block ready" state.
     *
     * @param out the output stream to write the data to
     */
    private void finishBlockAndWriteHeaderAndData(DataOutputStream out)
        throws IOException {
      ensureBlockReady();
      out.write(onDiskBytesWithHeader);
      out.write(onDiskChecksum);
    }

    /**
     * Returns the header and the on-disk data (header + data + checksums) as a byte array,
     * exactly as they would be stored on disk. Can be called in the "writing" state or in
     * the "block ready" state; if called in the "writing" state, transitions the writer to
     * the "block ready" state.
     *
     * @return header and data as they would be stored on disk, in a byte array
     */
    byte[] getHeaderAndDataForTest() throws IOException {
      ensureBlockReady();
      // This is not very optimal, because we are doing an extra copy.
      // But this method is used only by unit tests.
      byte[] output =
          new byte[onDiskBytesWithHeader.length
              + onDiskChecksum.length];
      System.arraycopy(onDiskBytesWithHeader, 0, output, 0,
          onDiskBytesWithHeader.length);
      System.arraycopy(onDiskChecksum, 0, output,
          onDiskBytesWithHeader.length, onDiskChecksum.length);
      return output;
    }

    /**
     * Releases resources used by this writer.
     */
    public void release() {
      if (dataBlockEncodingCtx != null) {
        dataBlockEncodingCtx.close();
        dataBlockEncodingCtx = null;
      }
      if (defaultBlockEncodingCtx != null) {
        defaultBlockEncodingCtx.close();
        defaultBlockEncodingCtx = null;
      }
    }

    /**
     * Returns the on-disk size of the data portion of the block. This is the compressed
     * size if compression is enabled. Can only be called in the "block ready" state. The
     * header is not compressed, and its size is not included in the return value.
     *
     * @return the on-disk size of the block, not including the header
     */
    int getOnDiskSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length
          - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * Returns the on-disk size of the block. Can only be called in the "block ready" state.
     *
     * @return the on-disk size of the block ready to be written, including the header size,
     *         the data and the checksum data
     */
    int getOnDiskSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return onDiskBytesWithHeader.length + onDiskChecksum.length;
    }

    /**
     * The uncompressed size of the block data. Does not include header size.
     */
    int getUncompressedSizeWithoutHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length - HConstants.HFILEBLOCK_HEADER_SIZE;
    }

    /**
     * The uncompressed size of the block data, including header size.
     */
    int getUncompressedSizeWithHeader() {
      expectState(State.BLOCK_READY);
      return uncompressedBytesWithHeader.length;
    }

    /** @return true if a block is being written */
    public boolean isWriting() {
      return state == State.WRITING;
    }

    /**
     * Returns the number of bytes written into the current block so far, or zero if not
     * writing the block at the moment. Note that this will return zero in the "block ready"
     * state as well.
     *
     * @return the number of bytes written
     */
    public int blockSizeWritten() {
      if (state != State.WRITING) {
        return 0;
      }
      return userDataStream.size();
    }

    /**
     * Returns the header followed by the uncompressed data, even if using compression. This
     * is needed for storing uncompressed blocks in the block cache. Can only be called in
     * the "block ready" state. Returns only the header and data, does not include checksum
     * data.
     *
     * @return uncompressed block bytes for caching on write
     */
    ByteBuffer getUncompressedBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(uncompressedBytesWithHeader);
    }

    /**
     * Returns the header followed by the on-disk (compressed/encoded/encrypted) data. This
     * is needed for storing packed blocks in the block cache. Can only be called in the
     * "block ready" state. Returns only the header and data, does not include checksum
     * data.
     *
     * @return packed block bytes for caching on write
     */
    ByteBuffer getOnDiskBufferWithHeader() {
      expectState(State.BLOCK_READY);
      return ByteBuffer.wrap(onDiskBytesWithHeader);
    }

    private void expectState(State expectedState) {
      if (state != expectedState) {
        throw new IllegalStateException("Expected state: " + expectedState +
            ", actual state: " + state);
      }
    }

    /**
     * Takes the given {@link BlockWritable} instance, creates a new block of its appropriate
     * type, writes the writable into this block, and flushes the block into the output
     * stream.
     *
     * @param bw the block-writable object to write as a block
     * @param out the file system output stream
     * @throws IOException
     */
    public void writeBlock(BlockWritable bw, FSDataOutputStream out)
        throws IOException {
      bw.writeToBlock(startWriting(bw.getBlockType()));
      writeHeaderAndData(out);
    }

    /**
     * Creates a new HFileBlock for caching on write. Checksums have already been validated,
     * so the byte buffer passed into the constructor of this newly created block does not
     * contain checksum data; this is indicated by setting a 0 value for bytesPerChecksum in
     * the cached block's file context.
     *
     * @param cacheConf used to decide whether to cache packed or unpacked block bytes
     * @return a new HFileBlock suitable for the block cache
     */
    public HFileBlock getBlockForCaching(CacheConfig cacheConf) {
      HFileContext newContext = new HFileContextBuilder()
          .withBlockSize(fileContext.getBlocksize())
          .withBytesPerCheckSum(0)
          .withChecksumType(ChecksumType.NULL) // no checksums in cached data
          .withCompression(fileContext.getCompression())
          .withDataBlockEncoding(fileContext.getDataBlockEncoding())
          .withHBaseCheckSum(fileContext.isUseHBaseChecksum())
          .withCompressTags(fileContext.isCompressTags())
          .withIncludesMvcc(fileContext.isIncludesMvcc())
          .withIncludesTags(fileContext.isIncludesTags())
          .build();
      return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(),
          getUncompressedSizeWithoutHeader(), prevOffset,
          cacheConf.shouldCacheCompressed(blockType.getCategory()) ?
              getOnDiskBufferWithHeader() :
              getUncompressedBufferWithHeader(),
          FILL_HEADER, startOffset,
          onDiskBytesWithHeader.length + onDiskChecksum.length, newContext);
    }
  }
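  /** Something that can be written into a block. */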
  public interface BlockWritable {

    /** The type of block this data should use. */
    BlockType getBlockType();

    /**
     * Writes the block to the provided stream. Must not write any magic records.
     *
     * @param out a stream to write uncompressed data into
     */
    void writeToBlock(DataOutput out) throws IOException;
  }
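  /** An interface for iterating over a sequence of blocks in a given section of a file. */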
  public interface BlockIterator {

    /**
     * Get the next block, or null if there are no more blocks to iterate.
     */
    HFileBlock nextBlock() throws IOException;

    /**
     * Similar to {@link #nextBlock()} but checks the block type, throws an exception if it
     * is incorrect, and returns the block.
     */
    HFileBlock nextBlockWithBlockType(BlockType blockType) throws IOException;
  }
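  /** A full-fledged reader with an iteration ability. */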
  public interface FSReader {

    /**
     * Reads the block at the given offset in the file with the given on-disk size and
     * uncompressed size.
     *
     * @param offset the offset in the file to read at
     * @param onDiskSize the on-disk size of the entire block, including all applicable
     *          headers, or -1 if unknown
     * @param uncompressedSize the uncompressed size of the compressed part of the block, or
     *          -1 if unknown
     * @param pread whether to use a positional read instead of seek+read
     * @return the newly read block
     */
    HFileBlock readBlockData(long offset, long onDiskSize,
        int uncompressedSize, boolean pread) throws IOException;

    /**
     * Creates a block iterator over the given portion of the {@link HFile}. The iterator
     * returns blocks whose start offset lies in [startOffset, endOffset). Returned blocks
     * are always unpacked.
     *
     * @param startOffset the offset of the block to start iteration with
     * @param endOffset the offset to end iteration at (exclusive)
     * @return an iterator of blocks between the two given offsets
     */
    BlockIterator blockRange(long startOffset, long endOffset);

    /** Closes the backing streams */
    void closeStreams() throws IOException;

    /** Get a decoder for {@link BlockType#ENCODED_DATA} blocks from this file. */
    HFileBlockDecodingContext getBlockDecodingContext();

    /** Get the default decoder for blocks from this file. */
    HFileBlockDecodingContext getDefaultBlockDecodingContext();
  }

  /**
   * A common implementation of some methods of {@link FSReader} and some tools for
   * implementing HFile format version-specific block readers.
   */
  private abstract static class AbstractFSReader implements FSReader {

    /** The size of the file we are reading from, or -1 if unknown. */
    protected long fileSize;

    /** The size of the header */
    protected final int hdrSize;

    /** The filesystem used to access data */
    protected HFileSystem hfs;

    /** The path (if any) where this data is coming from */
    protected Path path;

    /** Serializes seek + read on the non-positional read path. */
    private final Lock streamLock = new ReentrantLock();

    /** The default buffer size for our buffered streams */
    public static final int DEFAULT_BUFFER_SIZE = 1 << 20;

    protected HFileContext fileContext;

    public AbstractFSReader(long fileSize, HFileSystem hfs, Path path, HFileContext fileContext)
        throws IOException {
      this.fileSize = fileSize;
      this.hfs = hfs;
      this.path = path;
      this.fileContext = fileContext;
      this.hdrSize = headerSize(fileContext.isUseHBaseChecksum());
    }

    @Override
    public BlockIterator blockRange(final long startOffset,
        final long endOffset) {
      final FSReader owner = this; // handle for the anonymous class below
      return new BlockIterator() {
        private long offset = startOffset;

        @Override
        public HFileBlock nextBlock() throws IOException {
          if (offset >= endOffset) {
            return null;
          }
          HFileBlock b = readBlockData(offset, -1, -1, false);
          offset += b.getOnDiskSizeWithHeader();
          return b.unpack(fileContext, owner);
        }

        @Override
        public HFileBlock nextBlockWithBlockType(BlockType blockType)
            throws IOException {
          HFileBlock blk = nextBlock();
          if (blk.getBlockType() != blockType) {
            throw new IOException("Expected block of type " + blockType
                + " but found " + blk.getBlockType());
          }
          return blk;
        }
      };
    }

    /**
     * Does a positional read or a seek and read into the given buffer. Returns the on-disk
     * size of the next block, or -1 if it could not be determined.
     *
     * @param istream the input source of data
     * @param dest destination buffer
     * @param destOffset offset into the destination buffer
     * @param size size of the block to be read
     * @param peekIntoNextBlock whether to also read the next block's header
     * @param fileOffset position in the stream to read at
     * @param pread whether we should do a positional read
     * @return the on-disk size of the next block with header size included, or -1 if it
     *         could not be determined
     */
    protected int readAtOffset(FSDataInputStream istream,
        byte[] dest, int destOffset, int size,
        boolean peekIntoNextBlock, long fileOffset, boolean pread)
        throws IOException {
      if (peekIntoNextBlock &&
          destOffset + size + hdrSize > dest.length) {
        // We are asked to read the next block's header as well, but there is
        // not enough room in the array.
        throw new IOException("Attempted to read " + size + " bytes and " +
            hdrSize + " bytes of next header into a " + dest.length +
            "-byte array at offset " + destOffset);
      }

      if (!pread && streamLock.tryLock()) {
        // Seek + read. Better for scanning.
        try {
          istream.seek(fileOffset);

          long realOffset = istream.getPos();
          if (realOffset != fileOffset) {
            throw new IOException("Tried to seek to " + fileOffset + " to "
                + "read " + size + " bytes, but pos=" + realOffset
                + " after seek");
          }

          if (!peekIntoNextBlock) {
            IOUtils.readFully(istream, dest, destOffset, size);
            return -1;
          }

          // Try to read the next block's header.
          if (!readWithExtra(istream, dest, destOffset, size, hdrSize)) {
            return -1;
          }
        } finally {
          streamLock.unlock();
        }
      } else {
        // Positional read. Better for random reads; also used when the stream lock
        // is already held by another thread.
        int extraSize = peekIntoNextBlock ? hdrSize : 0;
        if (!positionalReadWithExtra(istream, fileOffset, dest, destOffset,
            size, extraSize)) {
          return -1;
        }
      }

      assert peekIntoNextBlock;
      return Bytes.toInt(dest, destOffset + size + BlockType.MAGIC_LENGTH) + hdrSize;
    }

  }
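  /**
   * We always prefetch the header of the next block, so that we know its on-disk size in
   * advance and can read the whole block in one I/O operation.
   */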
  private static class PrefetchedHeader {
    long offset = -1;
    byte[] header = new byte[HConstants.HFILEBLOCK_HEADER_SIZE];
    ByteBuffer buf = ByteBuffer.wrap(header, 0, HConstants.HFILEBLOCK_HEADER_SIZE);
  }

  /** Reads version 2 blocks from the filesystem. */
  static class FSReaderV2 extends AbstractFSReader {

    /** The wrapped stream of the underlying {@link HFile}, with or without filesystem-level
     * checksum validation. */
    protected FSDataInputStreamWrapper streamWrapper;

    private HFileBlockDecodingContext encodedBlockDecodingCtx;

    /** Default decoding context for blocks that are not encoded. */
    private final HFileBlockDefaultDecodingContext defaultDecodingCtx;

    /** Caches the next block's header, read opportunistically with this block's data. */
    private ThreadLocal<PrefetchedHeader> prefetchedHeaderForThread =
        new ThreadLocal<PrefetchedHeader>() {
          @Override
          public PrefetchedHeader initialValue() {
            return new PrefetchedHeader();
          }
        };

    public FSReaderV2(FSDataInputStreamWrapper stream, long fileSize, HFileSystem hfs, Path path,
        HFileContext fileContext) throws IOException {
      super(fileSize, hfs, path, fileContext);
      this.streamWrapper = stream;
      // Older versions of HBase didn't support checksum.
      this.streamWrapper.prepareForBlockReader(!fileContext.isUseHBaseChecksum());
      defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
      encodedBlockDecodingCtx = defaultDecodingCtx;
    }

    /**
     * A constructor that reads files with the latest minor version. This is used by unit
     * tests only.
     */
    FSReaderV2(FSDataInputStream istream, long fileSize, HFileContext fileContext)
        throws IOException {
      this(new FSDataInputStreamWrapper(istream), fileSize, null, null, fileContext);
    }
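    /**
     * Reads a version 2 block, first attempting verification with HBase checksums if they
     * are enabled for this stream. On a checksum failure the read is retried with HDFS
     * checksum verification, and the stream temporarily stays in HDFS-checksum mode (see
     * {@link HFileBlock#CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD}).
     *
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including the header, or
     *          -1 if unknown
     * @param uncompressedSize the uncompressed size of the compressed part of the block, or
     *          -1 if unknown; always -1 in version 2
     * @param pread whether to use a positional read
     */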
    @Override
    public HFileBlock readBlockData(long offset, long onDiskSizeWithHeaderL,
        int uncompressedSize, boolean pread) throws IOException {
      // Get a copy of the current state of whether to validate
      // HBase checksums or not for this read call. This is not
      // thread-safe, but the one constraint is that if we decide
      // to skip HBase checksum verification then we are
      // guaranteed to use HDFS checksum verification.
      boolean doVerificationThruHBaseChecksum = streamWrapper.shouldUseHBaseChecksum();
      FSDataInputStream is = streamWrapper.getStream(doVerificationThruHBaseChecksum);

      HFileBlock blk = readBlockDataInternal(is, offset,
          onDiskSizeWithHeaderL,
          uncompressedSize, pread,
          doVerificationThruHBaseChecksum);
      if (blk == null) {
        HFile.LOG.warn("HBase checksum verification failed for file " +
            path + " at offset " +
            offset + " filesize " + fileSize +
            ". Retrying read with HDFS checksums turned on...");

        if (!doVerificationThruHBaseChecksum) {
          String msg = "HBase checksum verification failed for file " +
              path + " at offset " +
              offset + " filesize " + fileSize +
              " but this cannot happen because doVerify is " +
              doVerificationThruHBaseChecksum;
          HFile.LOG.warn(msg);
          throw new IOException(msg); // cannot happen case here
        }
        HFile.checksumFailures.incrementAndGet(); // update metrics

        // If we have a checksum failure, we fall back into a mode where
        // the next few reads use HDFS level checksums. We aim to make the
        // next CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD reads avoid
        // HBase checksum verification, but since this value is set without
        // holding a lock, it can so happen that we might actually use HDFS
        // checksums for a few more reads than necessary.
        is = this.streamWrapper.fallbackToFsChecksum(CHECKSUM_VERIFICATION_NUM_IO_THRESHOLD);
        doVerificationThruHBaseChecksum = false;
        blk = readBlockDataInternal(is, offset, onDiskSizeWithHeaderL,
            uncompressedSize, pread,
            doVerificationThruHBaseChecksum);
        if (blk != null) {
          HFile.LOG.warn("HDFS checksum verification succeeded for file " +
              path + " at offset " +
              offset + " filesize " + fileSize);
        }
      }
      if (blk == null && !doVerificationThruHBaseChecksum) {
        String msg = "readBlockData failed, possibly due to " +
            "checksum verification failed for file " + path +
            " at offset " + offset + " filesize " + fileSize;
        HFile.LOG.warn(msg);
        throw new IOException(msg);
      }

      // The read succeeded; report that the checksum was ok, so the stream can
      // consider switching back to HBase checksum verification if it had fallen
      // back to HDFS checksums earlier.
      streamWrapper.checksumOk();
      return blk;
    }
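    /**
     * Reads a version 2 block.
     *
     * @param is the input stream to read from
     * @param offset the offset in the stream to read at
     * @param onDiskSizeWithHeaderL the on-disk size of the block, including the header, or
     *          -1 if unknown
     * @param uncompressedSize the uncompressed size of the compressed part of the block, or
     *          -1 if unknown; always -1 in version 2
     * @param pread whether to use a positional read
     * @param verifyChecksum whether to verify checksums using HBase checksums
     * @return the block, or null if HBase checksum verification failed
     */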
    private HFileBlock readBlockDataInternal(FSDataInputStream is, long offset,
        long onDiskSizeWithHeaderL, int uncompressedSize, boolean pread,
        boolean verifyChecksum) throws IOException {
      if (offset < 0) {
        throw new IOException("Invalid offset=" + offset + " trying to read "
            + "block (onDiskSize=" + onDiskSizeWithHeaderL
            + ", uncompressedSize=" + uncompressedSize + ")");
      }
      if (uncompressedSize != -1) {
        throw new IOException("Version 2 block reader API does not need " +
            "the uncompressed size parameter");
      }

      if ((onDiskSizeWithHeaderL < hdrSize && onDiskSizeWithHeaderL != -1)
          || onDiskSizeWithHeaderL >= Integer.MAX_VALUE) {
        throw new IOException("Invalid onDisksize=" + onDiskSizeWithHeaderL
            + ": expected to be at least " + hdrSize
            + " and at most " + Integer.MAX_VALUE + ", or -1 (offset="
            + offset + ", uncompressedSize=" + uncompressedSize + ")");
      }

      int onDiskSizeWithHeader = (int) onDiskSizeWithHeaderL;

      // See if we can avoid reading the header: it may already be in the
      // thread-local cache from reading the previous block, in which case we
      // save a separate header read.
      PrefetchedHeader prefetchedHeader = prefetchedHeaderForThread.get();
      ByteBuffer headerBuf = prefetchedHeader.offset == offset ?
          prefetchedHeader.buf : null;

      int nextBlockOnDiskSize = 0;
      // Allocate enough space to fit the next block's header too.
      byte[] onDiskBlock = null;

      HFileBlock b = null;
      if (onDiskSizeWithHeader > 0) {
        // We know the total on-disk size. Read the entire block into memory,
        // then parse the header. This code path is used when doing a random
        // read operation relying on the block index, as well as when the
        // client knows the on-disk size from peeking into the next block's
        // header when reading the previous block. This is the faster and more
        // preferable case.

        // The size to skip in case we have already read the header.
        int preReadHeaderSize = headerBuf == null ? 0 : hdrSize;
        onDiskBlock = new byte[onDiskSizeWithHeader + hdrSize]; // room for the next header

        nextBlockOnDiskSize = readAtOffset(is, onDiskBlock,
            preReadHeaderSize, onDiskSizeWithHeader - preReadHeaderSize,
            true, offset + preReadHeaderSize, pread);
        if (headerBuf != null) {
          // The header has been read when reading the previous block; copy it
          // to this block's header.
          System.arraycopy(headerBuf.array(),
              headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        } else {
          headerBuf = ByteBuffer.wrap(onDiskBlock, 0, hdrSize);
        }

        // Parse the header from the freshly read (or prefetched) header bytes.
        try {
          b = new HFileBlock(headerBuf, fileContext.isUseHBaseChecksum());
        } catch (IOException ex) {
          // Seen in load testing. Provide comprehensive debug info.
          throw new IOException("Failed to read compressed block at "
              + offset
              + ", onDiskSizeWithoutHeader="
              + onDiskSizeWithHeader
              + ", preReadHeaderSize="
              + hdrSize
              + ", header.length="
              + prefetchedHeader.header.length
              + ", header bytes: "
              + Bytes.toStringBinary(prefetchedHeader.header, 0,
                  hdrSize), ex);
        }

        int onDiskSizeWithoutHeader = onDiskSizeWithHeader - hdrSize;
        assert onDiskSizeWithoutHeader >= 0;
        b.validateOnDiskSizeWithoutHeader(onDiskSizeWithoutHeader);
      } else {
        // We don't know the on-disk size. Read the header first, determine the
        // on-disk size from it, and read the remaining data, thereby incurring
        // two read operations.
        if (headerBuf == null) {
          // This might happen when we are doing the first read in a series of
          // reads or a random read, and we don't have access to the block
          // index. This is costly and should happen very rarely.
          headerBuf = ByteBuffer.allocate(hdrSize);
          readAtOffset(is, headerBuf.array(), headerBuf.arrayOffset(),
              hdrSize, false, offset, pread);
        }
        b = new HFileBlock(headerBuf, fileContext.isUseHBaseChecksum());
        onDiskBlock = new byte[b.getOnDiskSizeWithHeader() + hdrSize];
        System.arraycopy(headerBuf.array(), headerBuf.arrayOffset(), onDiskBlock, 0, hdrSize);
        nextBlockOnDiskSize =
            readAtOffset(is, onDiskBlock, hdrSize, b.getOnDiskSizeWithHeader()
                - hdrSize, true, offset + hdrSize, pread);
        onDiskSizeWithHeader = b.onDiskSizeWithoutHeader + hdrSize;
      }

      if (!fileContext.isCompressedOrEncrypted()) {
        b.assumeUncompressed();
      }

      if (verifyChecksum && !validateBlockChecksum(b, onDiskBlock, hdrSize)) {
        return null; // checksum mismatch
      }

      // The onDiskBlock will become the headerAndDataBuffer for this block.
      // If nextBlockOnDiskSizeWithHeader is not zero, the onDiskBlock already
      // contains the header of the next block, so there is no need to set the
      // next block's header in it.
      b = new HFileBlock(ByteBuffer.wrap(onDiskBlock, 0, onDiskSizeWithHeader),
          this.fileContext.isUseHBaseChecksum());

      b.nextBlockOnDiskSizeWithHeader = nextBlockOnDiskSize;

      // Set prefetched header.
      if (b.hasNextBlockHeader()) {
        prefetchedHeader.offset = offset + b.getOnDiskSizeWithHeader();
        System.arraycopy(onDiskBlock, onDiskSizeWithHeader,
            prefetchedHeader.header, 0, hdrSize);
      }

      b.offset = offset;
      b.fileContext.setIncludesTags(this.fileContext.isIncludesTags());
      b.fileContext.setIncludesMvcc(this.fileContext.isIncludesMvcc());
      return b;
    }

    void setIncludesMemstoreTS(boolean includesMemstoreTS) {
      this.fileContext.setIncludesMvcc(includesMemstoreTS);
    }

    void setDataBlockEncoder(HFileDataBlockEncoder encoder) {
      encodedBlockDecodingCtx = encoder.newDataBlockDecodingContext(this.fileContext);
    }

    @Override
    public HFileBlockDecodingContext getBlockDecodingContext() {
      return this.encodedBlockDecodingCtx;
    }

    @Override
    public HFileBlockDecodingContext getDefaultBlockDecodingContext() {
      return this.defaultDecodingCtx;
    }

    /**
     * Generates the checksum for the header as well as the data and then validates it
     * against the value stored in the header. If there is a checksum mismatch, returns
     * false; otherwise returns true.
     */
    protected boolean validateBlockChecksum(HFileBlock block, byte[] data, int hdrSize)
        throws IOException {
      return ChecksumUtil.validateBlockChecksum(path, block, data, hdrSize);
    }

    @Override
    public void closeStreams() throws IOException {
      streamWrapper.close();
    }

    @Override
    public String toString() {
      return "FSReaderV2 [ hfs=" + hfs + " path=" + path + " fileContext=" + fileContext + " ]";
    }
  }

  @Override
  public int getSerializedLength() {
    if (buf != null) {
      // Include extra bytes for the next block's header, if it is in the buffer.
      int extraSpace = hasNextBlockHeader() ? headerSize() : 0;
      return this.buf.limit() + extraSpace + HFileBlock.EXTRA_SERIALIZATION_SPACE;
    }
    return 0;
  }

  @Override
  public void serialize(ByteBuffer destination) {
    // Copy the block bytes, then append the extra cache metadata.
    destination.put(this.buf.array(), this.buf.arrayOffset(),
        getSerializedLength() - EXTRA_SERIALIZATION_SPACE);
    serializeExtraInfo(destination);
  }

  public void serializeExtraInfo(ByteBuffer destination) {
    destination.put(this.fileContext.isUseHBaseChecksum() ? (byte) 1 : (byte) 0);
    destination.putLong(this.offset);
    destination.putInt(this.nextBlockOnDiskSizeWithHeader);
    destination.rewind();
  }

  @Override
  public CacheableDeserializer<Cacheable> getDeserializer() {
    return HFileBlock.blockDeserializer;
  }

  @Override
  public boolean equals(Object comparison) {
    if (this == comparison) {
      return true;
    }
    if (comparison == null) {
      return false;
    }
    if (comparison.getClass() != this.getClass()) {
      return false;
    }

    HFileBlock castedComparison = (HFileBlock) comparison;

    if (castedComparison.blockType != this.blockType) {
      return false;
    }
    if (castedComparison.nextBlockOnDiskSizeWithHeader != this.nextBlockOnDiskSizeWithHeader) {
      return false;
    }
    if (castedComparison.offset != this.offset) {
      return false;
    }
    if (castedComparison.onDiskSizeWithoutHeader != this.onDiskSizeWithoutHeader) {
      return false;
    }
    if (castedComparison.prevBlockOffset != this.prevBlockOffset) {
      return false;
    }
    if (castedComparison.uncompressedSizeWithoutHeader != this.uncompressedSizeWithoutHeader) {
      return false;
    }
    if (Bytes.compareTo(this.buf.array(), this.buf.arrayOffset(), this.buf.limit(),
        castedComparison.buf.array(), castedComparison.buf.arrayOffset(),
        castedComparison.buf.limit()) != 0) {
      return false;
    }
    return true;
  }

  public DataBlockEncoding getDataBlockEncoding() {
    if (blockType == BlockType.ENCODED_DATA) {
      return DataBlockEncoding.getEncodingById(getDataBlockEncodingId());
    }
    return DataBlockEncoding.NONE;
  }

  byte getChecksumType() {
    return this.fileContext.getChecksumType().getCode();
  }

  int getBytesPerChecksum() {
    return this.fileContext.getBytesPerChecksum();
  }

  /** @return the size of data on disk plus header. Excludes checksum. */
  int getOnDiskDataSizeWithHeader() {
    return this.onDiskDataSizeWithHeader;
  }

  /**
   * Calculate the number of bytes required to store all the checksums for this block. Each
   * checksum value is a 4 byte integer ({@link #CHECKSUM_SIZE}).
   */
  int totalChecksumBytes() {
    // If the hfile block has minorVersion 0, then there is no checksum
    // data to validate. Similarly, a zero value in this.bytesPerChecksum
    // indicates that cached blocks do not have checksum data because
    // checksums were already validated when the block was read from disk.
    if (!fileContext.isUseHBaseChecksum() || this.fileContext.getBytesPerChecksum() == 0) {
      return 0;
    }
    return (int) ChecksumUtil.numBytes(onDiskDataSizeWithHeader,
        this.fileContext.getBytesPerChecksum());
  }

  /**
   * Returns the size of this block header.
   */
  public int headerSize() {
    return headerSize(this.fileContext.isUseHBaseChecksum());
  }

  /**
   * Maps a minor version to the size of the header.
   */
  public static int headerSize(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_HEADER_SIZE;
    }
    return HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM;
  }

  /**
   * Return the appropriate DUMMY_HEADER for the minor version.
   */
  public byte[] getDummyHeaderForVersion() {
    return getDummyHeaderForVersion(this.fileContext.isUseHBaseChecksum());
  }

  /**
   * Return the appropriate DUMMY_HEADER for the minor version.
   */
  private static byte[] getDummyHeaderForVersion(boolean usesHBaseChecksum) {
    if (usesHBaseChecksum) {
      return HConstants.HFILEBLOCK_DUMMY_HEADER;
    }
    return DUMMY_HEADER_NO_CHECKSUM;
  }

  /**
   * @return the HFileContext used to create this HFileBlock. Not necessarily the
   *         fileContext for the file from which this block's data was originally read.
   */
  public HFileContext getHFileContext() {
    return this.fileContext;
  }

  /**
   * Convert the contents of the block header into a human readable string. This is mostly
   * helpful for debugging. This assumes that the block has a minor version &gt; 0.
   */
  static String toStringHeader(ByteBuffer buf) throws IOException {
    int offset = buf.arrayOffset();
    byte[] b = buf.array();
    long magic = Bytes.toLong(b, offset);
    BlockType bt = BlockType.read(buf);
    offset += Bytes.SIZEOF_LONG;
    int compressedBlockSizeNoHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    int uncompressedBlockSizeNoHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    long prevBlockOffset = Bytes.toLong(b, offset);
    offset += Bytes.SIZEOF_LONG;
    byte cksumtype = b[offset];
    offset += Bytes.SIZEOF_BYTE;
    long bytesPerChecksum = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    long onDiskDataSizeWithHeader = Bytes.toInt(b, offset);
    offset += Bytes.SIZEOF_INT;
    return " Header dump: magic: " + magic +
        " blockType " + bt +
        " compressedBlockSizeNoHeader " +
        compressedBlockSizeNoHeader +
        " uncompressedBlockSizeNoHeader " +
        uncompressedBlockSizeNoHeader +
        " prevBlockOffset " + prevBlockOffset +
        " checksumType " + ChecksumType.codeToType(cksumtype) +
        " bytesPerChecksum " + bytesPerChecksum +
        " onDiskDataSizeWithHeader " + onDiskDataSizeWithHeader;
  }
}