1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21 package org.apache.hadoop.hbase.ipc;
22
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
28
29
30
31
/**
 * An {@link OutputStream} that accumulates bytes in a growable
 * {@link ByteBuffer} (heap or direct). Capacity doubles on demand, bounded
 * by {@link Integer#MAX_VALUE}. Not thread-safe.
 */
public class ByteBufferOutputStream extends OutputStream {

  protected ByteBuffer buf;

  /**
   * Creates a stream backed by a heap buffer.
   *
   * @param capacity initial capacity in bytes
   */
  public ByteBufferOutputStream(int capacity) {
    this(capacity, false);
  }

  /**
   * Creates a stream backed by a heap or direct buffer.
   *
   * @param capacity initial capacity in bytes
   * @param useDirectByteBuffer true to allocate a direct (off-heap) buffer
   */
  public ByteBufferOutputStream(int capacity, boolean useDirectByteBuffer) {
    if (useDirectByteBuffer) {
      buf = ByteBuffer.allocateDirect(capacity);
    } else {
      buf = ByteBuffer.allocate(capacity);
    }
  }

  /** @return the number of bytes written so far */
  public int size() {
    return buf.position();
  }

  /**
   * Flips the internal buffer and returns it, ready for reading the bytes
   * written so far (position 0, limit == {@link #size()}).
   *
   * <p>NOTE: this mutates the stream's own buffer; writing to the stream
   * after calling this method will corrupt the returned data.
   *
   * @return the backing buffer, flipped for reading
   */
  public ByteBuffer getByteBuffer() {
    buf.flip();
    return buf;
  }

  /**
   * Ensures room for {@code extra} more bytes, growing the buffer if needed.
   * Growth doubles the capacity (or more, if doubling is still too small)
   * and preserves the buffer type (direct vs. heap).
   *
   * @param extra number of additional bytes about to be written
   * @throws BufferOverflowException if the required size exceeds
   *           {@link Integer#MAX_VALUE}
   */
  private void checkSizeAndGrow(int extra) {
    // Use long arithmetic: (position + extra) can overflow int and wrap
    // negative, which would silently skip the grow.
    long capacityNeeded = buf.position() + (long) extra;
    if (capacityNeeded > buf.limit()) {
      if (capacityNeeded > Integer.MAX_VALUE) {
        // Cannot satisfy the request with a single ByteBuffer.
        throw new BufferOverflowException();
      }
      // Double the capacity, but never less than what is actually needed.
      long nextCapacity = Math.min(((long) buf.capacity()) * 2L, (long) Integer.MAX_VALUE);
      int newSize = (int) Math.max(nextCapacity, capacityNeeded);

      // Preserve directness: a stream built with useDirectByteBuffer=true
      // must stay off-heap after growth (the old code always re-allocated
      // on-heap, silently discarding the direct buffer).
      ByteBuffer newBuf = buf.isDirect()
          ? ByteBuffer.allocateDirect(newSize)
          : ByteBuffer.allocate(newSize);
      buf.flip();
      newBuf.put(buf);
      buf = newBuf;
    }
  }

  @Override
  public void write(int b) throws IOException {
    checkSizeAndGrow(1); // one byte (Bytes.SIZEOF_BYTE == 1)

    buf.put((byte) b);
  }

  @Override
  public void write(byte[] b) throws IOException {
    checkSizeAndGrow(b.length);

    buf.put(b);
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    checkSizeAndGrow(len);

    buf.put(b, off, len);
  }

  /** No-op: bytes are already stored in the backing buffer. */
  @Override
  public void flush() throws IOException {
    // nothing buffered outside of buf
  }

  /** No-op: there is no underlying resource to release. */
  @Override
  public void close() throws IOException {
    // nothing to close
  }
}