/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.codec.prefixtree.blockmeta;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeBlockMeta;
import org.junit.Assert;
import org.junit.Test;

public class TestBlockMeta {

  static int BLOCK_START = 123;

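  /**
   * Builds a PrefixTreeBlockMeta populated with arbitrary but representative values for each
   * field that takes part in serialization.
   */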
  private static PrefixTreeBlockMeta createSample() {
    PrefixTreeBlockMeta m = new PrefixTreeBlockMeta();
    m.setNumMetaBytes(0);
    m.setNumKeyValueBytes(3195);

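    // byte counts of each encoded section (rows, families, qualifiers, timestamps, mvcc versions, values)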
    m.setNumRowBytes(0);
    m.setNumFamilyBytes(3);
    m.setNumQualifierBytes(12345);
    m.setNumTimestampBytes(23456);
    m.setNumMvccVersionBytes(5);
    m.setNumValueBytes(34567);

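    // widths, in bytes, of the offsets and indexes used to reference those sections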
    m.setNextNodeOffsetWidth(3);
    m.setFamilyOffsetWidth(1);
    m.setQualifierOffsetWidth(2);
    m.setTimestampIndexWidth(1);
    m.setMvccVersionIndexWidth(2);
    m.setValueOffsetWidth(8);
    m.setValueLengthWidth(3);

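    // shape of the row trie and maximum key component lengths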
    m.setRowTreeDepth(11);
    m.setMaxRowLength(200);
    m.setMaxQualifierLength(50);

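    // timestamps and mvcc versions are encoded as deltas from these minimums, using the given widths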
    m.setMinTimestamp(1318966363481L);
    m.setTimestampDeltaWidth(3);
    m.setMinMvccVersion(100L);
    m.setMvccVersionDeltaWidth(4);

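    // cell type info: whether all cells share one KeyValue type, and the type code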
    m.setAllSameType(false);
    m.setAllTypes(KeyValue.Type.Delete.getCode());

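    // counts of unique rows, families, and qualifiers in the block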
    m.setNumUniqueRows(88);
    m.setNumUniqueFamilies(1);
    m.setNumUniqueQualifiers(56);
    return m;
  }

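  /**
   * Round-trips the sample block meta through its variable-width stream serialization and
   * verifies that the deserialized copy equals the original.
   */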
  @Test
  public void testStreamSerialization() throws IOException {
    PrefixTreeBlockMeta original = createSample();
    ByteArrayOutputStream os = new ByteArrayOutputStream(10000);
    original.writeVariableBytesToOutputStream(os);
    ByteBuffer buffer = ByteBuffer.wrap(os.toByteArray());
    PrefixTreeBlockMeta roundTripped = new PrefixTreeBlockMeta(buffer);
    Assert.assertTrue(original.equals(roundTripped));
  }

}