/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package org.apache.hadoop.hbase.io.hfile.bucket;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import java.nio.ByteBuffer;

import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
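/**
 * Basic round-trip test for {@link ByteBufferIOEngine}: write a block of
 * random size filled with a single random byte value, read it back from the
 * same offset, and verify the two copies match. Offsets at the very start
 * and the very end of the engine's capacity are exercised explicitly before
 * random offsets are used.
 */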
@Category(SmallTests.class)
public class TestByteBufferIOEngine {

  @Test
  public void testByteBufferIOEngine() throws Exception {
    int capacity = 32 * 1024 * 1024; // 32 MB
    int testNum = 100;
    int maxBlockSize = 64 * 1024; // 64 KB
    ByteBufferIOEngine ioEngine = new ByteBufferIOEngine(capacity, false);
    // Cover the boundaries explicitly: the first tenth of the iterations
    // writes at offset 0 and the next tenth writes flush against the end of
    // the capacity; the remaining iterations use random offsets.
    int testOffsetAtStartNum = testNum / 10;
    int testOffsetAtEndNum = testNum / 10;
    for (int i = 0; i < testNum; i++) {
      // Fill a block of random size (up to maxBlockSize) with one random
      // byte value so a successful read-back is easy to verify.
      byte val = (byte) (Math.random() * 255);
      int blockSize = (int) (Math.random() * maxBlockSize);
      byte[] byteArray = new byte[blockSize];
      for (int j = 0; j < byteArray.length; ++j) {
        byteArray[j] = val;
      }
      ByteBuffer srcBuffer = ByteBuffer.wrap(byteArray);
      int offset;
      if (testOffsetAtStartNum > 0) {
        testOffsetAtStartNum--;
        offset = 0;
      } else if (testOffsetAtEndNum > 0) {
        testOffsetAtEndNum--;
        offset = capacity - blockSize;
      } else {
        offset = (int) (Math.random() * (capacity - maxBlockSize));
      }
      ioEngine.write(srcBuffer, offset);
      ByteBuffer dstBuffer = ByteBuffer.allocate(blockSize);
      ioEngine.read(dstBuffer, offset);
      // The block read back must be byte-for-byte identical to the block
      // that was written.
      assertArrayEquals(byteArray, dstBuffer.array());
    }
    // Plain `assert` statements are no-ops unless the JVM runs with -ea, so
    // use JUnit assertions to ensure both boundary cases were exercised.
    assertEquals(0, testOffsetAtStartNum);
    assertEquals(0, testOffsetAtEndNum);
  }
}