1
2
3
4 package org.apache.hadoop.hbase.rest.protobuf.generated;
5
6 public final class ScannerMessage {
  // Private constructor: this outer class is only a container for the generated
  // Scanner message and its descriptor state; it is never instantiated.
  private ScannerMessage() {}
  /**
   * Registers protobuf extensions declared by this file into {@code registry}.
   * The body is empty because ScannerMessage.proto declares no extensions;
   * the method exists so all generated outer classes share the same API shape.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
11 public static final class Scanner extends
12 com.google.protobuf.GeneratedMessage {
13
    // Normal constructor used by Builder.create(); initFields() is a no-op here
    // (all defaults are set in the field initializers below).
    private Scanner() {
      initFields();
    }
    // No-init constructor used only for the shared defaultInstance, so that the
    // instance can be created before the descriptor machinery is initialized;
    // the static initializer calls initFields() explicitly afterwards.
    private Scanner(boolean noInit) {}

    // Shared default instance, assigned in the static initializer at the bottom
    // of this class.
    private static final Scanner defaultInstance;
    public static Scanner getDefaultInstance() {
      return defaultInstance;
    }

    public Scanner getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Message descriptor for Scanner; populated by the outer class's static
    // descriptor-building block.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor;
    }

    // Reflection table mapping descriptor fields to the generated has*/get*
    // accessors; used by the GeneratedMessage base class.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable;
    }
37
38
    // Field 1: optional bytes startRow. hasStartRow is the proto2 presence bit.
    public static final int STARTROW_FIELD_NUMBER = 1;
    private boolean hasStartRow;
    private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
    public boolean hasStartRow() { return hasStartRow; }
    public com.google.protobuf.ByteString getStartRow() { return startRow_; }

    // Field 2: optional bytes endRow.
    public static final int ENDROW_FIELD_NUMBER = 2;
    private boolean hasEndRow;
    private com.google.protobuf.ByteString endRow_ = com.google.protobuf.ByteString.EMPTY;
    public boolean hasEndRow() { return hasEndRow; }
    public com.google.protobuf.ByteString getEndRow() { return endRow_; }

    // Field 3: repeated bytes columns. Starts as the shared immutable empty
    // list; the Builder swaps in an ArrayList on first add and buildPartial()
    // wraps it unmodifiable.
    public static final int COLUMNS_FIELD_NUMBER = 3;
    private java.util.List<com.google.protobuf.ByteString> columns_ =
      java.util.Collections.emptyList();
    public java.util.List<com.google.protobuf.ByteString> getColumnsList() {
      return columns_;
    }
    public int getColumnsCount() { return columns_.size(); }
    public com.google.protobuf.ByteString getColumns(int index) {
      return columns_.get(index);
    }

    // Field 4: optional int32 batch.
    public static final int BATCH_FIELD_NUMBER = 4;
    private boolean hasBatch;
    private int batch_ = 0;
    public boolean hasBatch() { return hasBatch; }
    public int getBatch() { return batch_; }

    // Field 5: optional int64 startTime.
    public static final int STARTTIME_FIELD_NUMBER = 5;
    private boolean hasStartTime;
    private long startTime_ = 0L;
    public boolean hasStartTime() { return hasStartTime; }
    public long getStartTime() { return startTime_; }

    // Field 6: optional int64 endTime.
    public static final int ENDTIME_FIELD_NUMBER = 6;
    private boolean hasEndTime;
    private long endTime_ = 0L;
    public boolean hasEndTime() { return hasEndTime; }
    public long getEndTime() { return endTime_; }

    // Field 7: optional int32 maxVersions.
    public static final int MAXVERSIONS_FIELD_NUMBER = 7;
    private boolean hasMaxVersions;
    private int maxVersions_ = 0;
    public boolean hasMaxVersions() { return hasMaxVersions; }
    public int getMaxVersions() { return maxVersions_; }

    // Field 8: optional string filter.
    public static final int FILTER_FIELD_NUMBER = 8;
    private boolean hasFilter;
    private java.lang.String filter_ = "";
    public boolean hasFilter() { return hasFilter; }
    public java.lang.String getFilter() { return filter_; }

    // No-op: every field's default is assigned in its initializer above.
    private void initFields() {
    }
    // Always true: Scanner has no required fields, so any instance is valid.
    public final boolean isInitialized() {
      return true;
    }
104
    /**
     * Serializes all set fields to {@code output} in ascending field-number
     * order, followed by any unknown fields preserved from parsing.
     * The leading getSerializedSize() call primes memoizedSerializedSize,
     * which the protobuf runtime relies on when length-delimiting messages.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (hasStartRow()) {
        output.writeBytes(1, getStartRow());
      }
      if (hasEndRow()) {
        output.writeBytes(2, getEndRow());
      }
      // Repeated field: one length-delimited record per element.
      for (com.google.protobuf.ByteString element : getColumnsList()) {
        output.writeBytes(3, element);
      }
      if (hasBatch()) {
        output.writeInt32(4, getBatch());
      }
      if (hasStartTime()) {
        output.writeInt64(5, getStartTime());
      }
      if (hasEndTime()) {
        output.writeInt64(6, getEndTime());
      }
      if (hasMaxVersions()) {
        output.writeInt32(7, getMaxVersions());
      }
      if (hasFilter()) {
        output.writeString(8, getFilter());
      }
      getUnknownFields().writeTo(output);
    }
134
    // Cached wire size; -1 means "not yet computed". Recomputation is
    // idempotent, so a benign race on this field is harmless.
    private int memoizedSerializedSize = -1;
    /**
     * Returns the exact number of bytes writeTo() will emit, computing and
     * caching it on first call.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (hasStartRow()) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getStartRow());
      }
      if (hasEndRow()) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getEndRow());
      }
      {
        // Repeated columns: sum of per-element payload sizes plus one tag byte
        // per element (field number 3 fits in a single-byte tag).
        int dataSize = 0;
        for (com.google.protobuf.ByteString element : getColumnsList()) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(element);
        }
        size += dataSize;
        size += 1 * getColumnsList().size();
      }
      if (hasBatch()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, getBatch());
      }
      if (hasStartTime()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(5, getStartTime());
      }
      if (hasEndTime()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(6, getEndTime());
      }
      if (hasMaxVersions()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(7, getMaxVersions());
      }
      if (hasFilter()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(8, getFilter());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
182
    // --- Static parse entry points ---------------------------------------
    // All overloads delegate to a fresh Builder and buildParsed(); the
    // delimited variants return null on clean EOF (no message present).
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Reads one varint-length-prefixed message; returns null if the stream is
    // already at EOF.
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
249
    // Builder factories: a fresh empty Builder, or one pre-populated from an
    // existing Scanner (toBuilder / newBuilder(prototype)).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
256
257 public static final class Builder extends
258 com.google.protobuf.GeneratedMessage.Builder<Builder> {
      // The message under construction, mutated in place by the setters below.
      // buildPartial() hands it off and nulls this field, so any later use of
      // the Builder fails fast with IllegalStateException.
      private org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner result;


      private Builder() {}

      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner();
        return builder;
      }

      // Exposes the in-progress message to the GeneratedMessage.Builder base class.
      protected org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner internalGetResult() {
        return result;
      }

      // Resets all fields by replacing the in-progress message with a fresh one.
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(result);
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.getDescriptor();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance();
      }

      public boolean isInitialized() {
        return result.isInitialized();
      }
      // Scanner.isInitialized() is always true (no required fields), so the
      // exception path here is dead in practice but kept for the standard
      // generated-code contract.
      public org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }

      // Variant used by the parse entry points: reports a missing-required-field
      // condition as InvalidProtocolBufferException instead of an unchecked error.
      private org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        // Freeze the repeated field; the identity check against EMPTY_LIST
        // detects whether any element was ever added (see addColumns()).
        if (result.columns_ != java.util.Collections.EMPTY_LIST) {
          result.columns_ =
            java.util.Collections.unmodifiableList(result.columns_);
        }
        // Transfer ownership of the built message and invalidate this Builder.
        org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner returnMe = result;
        result = null;
        return returnMe;
      }
328
      // Dynamic-dispatch merge required by the Message.Builder interface;
      // non-Scanner messages fall back to the reflective merge in the base class.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner) {
          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges every set field of {@code other} into the message under
      // construction: scalar fields overwrite, repeated columns are appended,
      // unknown fields are merged.
      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner other) {
        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.getDefaultInstance()) return this;
        if (other.hasStartRow()) {
          setStartRow(other.getStartRow());
        }
        if (other.hasEndRow()) {
          setEndRow(other.getEndRow());
        }
        if (!other.columns_.isEmpty()) {
          // Swap out the shared immutable empty list before appending.
          if (result.columns_.isEmpty()) {
            result.columns_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
          }
          result.columns_.addAll(other.columns_);
        }
        if (other.hasBatch()) {
          setBatch(other.getBatch());
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasEndTime()) {
          setEndTime(other.getEndTime());
        }
        if (other.hasMaxVersions()) {
          setMaxVersions(other.getMaxVersions());
        }
        if (other.hasFilter()) {
          setFilter(other.getFilter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
370
      /**
       * Wire-format parse loop: reads tags until end of input (tag 0) or an
       * unparseable unknown field, dispatching each known tag to its setter.
       * Case labels are precomputed tags ((fieldNumber << 3) | wireType),
       * e.g. 10 = field 1 wire type 2, 32 = field 4 wire type 0. The
       * {@code default} arm before the numbered cases is unusual but legal
       * Java -- switch-case order does not affect dispatch.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream: commit accumulated unknown fields and return.
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              // Unknown field: preserve it; stop if it cannot be skipped.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {
              setStartRow(input.readBytes());
              break;
            }
            case 18: {
              setEndRow(input.readBytes());
              break;
            }
            case 26: {
              addColumns(input.readBytes());
              break;
            }
            case 32: {
              setBatch(input.readInt32());
              break;
            }
            case 40: {
              setStartTime(input.readInt64());
              break;
            }
            case 48: {
              setEndTime(input.readInt64());
              break;
            }
            case 56: {
              setMaxVersions(input.readInt32());
              break;
            }
            case 66: {
              setFilter(input.readString());
              break;
            }
          }
        }
      }
427
428
429
      // --- startRow (field 1, bytes) ---
      public boolean hasStartRow() {
        return result.hasStartRow();
      }
      public com.google.protobuf.ByteString getStartRow() {
        return result.getStartRow();
      }
      public Builder setStartRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasStartRow = true;
        result.startRow_ = value;
        return this;
      }
      public Builder clearStartRow() {
        result.hasStartRow = false;
        result.startRow_ = getDefaultInstance().getStartRow();
        return this;
      }

      // --- endRow (field 2, bytes) ---
      public boolean hasEndRow() {
        return result.hasEndRow();
      }
      public com.google.protobuf.ByteString getEndRow() {
        return result.getEndRow();
      }
      public Builder setEndRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasEndRow = true;
        result.endRow_ = value;
        return this;
      }
      public Builder clearEndRow() {
        result.hasEndRow = false;
        result.endRow_ = getDefaultInstance().getEndRow();
        return this;
      }

      // --- columns (field 3, repeated bytes) ---
      // Read-only view of the in-progress list.
      public java.util.List<com.google.protobuf.ByteString> getColumnsList() {
        return java.util.Collections.unmodifiableList(result.columns_);
      }
      public int getColumnsCount() {
        return result.getColumnsCount();
      }
      public com.google.protobuf.ByteString getColumns(int index) {
        return result.getColumns(index);
      }
      // NOTE(review): if no element has been added yet, result.columns_ is the
      // immutable Collections.emptyList(), so set() would throw
      // UnsupportedOperationException -- standard behavior for this generation
      // of protobuf code; callers must add before setting by index.
      public Builder setColumns(int index, com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.columns_.set(index, value);
        return this;
      }
      public Builder addColumns(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        // Lazily replace the shared immutable empty list on first add.
        if (result.columns_.isEmpty()) {
          result.columns_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
        }
        result.columns_.add(value);
        return this;
      }
      public Builder addAllColumns(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        if (result.columns_.isEmpty()) {
          result.columns_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
        }
        super.addAll(values, result.columns_);
        return this;
      }
      public Builder clearColumns() {
        result.columns_ = java.util.Collections.emptyList();
        return this;
      }
510
511
      // --- batch (field 4, int32) ---
      public boolean hasBatch() {
        return result.hasBatch();
      }
      public int getBatch() {
        return result.getBatch();
      }
      public Builder setBatch(int value) {
        result.hasBatch = true;
        result.batch_ = value;
        return this;
      }
      public Builder clearBatch() {
        result.hasBatch = false;
        result.batch_ = 0;
        return this;
      }

      // --- startTime (field 5, int64) ---
      public boolean hasStartTime() {
        return result.hasStartTime();
      }
      public long getStartTime() {
        return result.getStartTime();
      }
      public Builder setStartTime(long value) {
        result.hasStartTime = true;
        result.startTime_ = value;
        return this;
      }
      public Builder clearStartTime() {
        result.hasStartTime = false;
        result.startTime_ = 0L;
        return this;
      }

      // --- endTime (field 6, int64) ---
      public boolean hasEndTime() {
        return result.hasEndTime();
      }
      public long getEndTime() {
        return result.getEndTime();
      }
      public Builder setEndTime(long value) {
        result.hasEndTime = true;
        result.endTime_ = value;
        return this;
      }
      public Builder clearEndTime() {
        result.hasEndTime = false;
        result.endTime_ = 0L;
        return this;
      }

      // --- maxVersions (field 7, int32) ---
      public boolean hasMaxVersions() {
        return result.hasMaxVersions();
      }
      public int getMaxVersions() {
        return result.getMaxVersions();
      }
      public Builder setMaxVersions(int value) {
        result.hasMaxVersions = true;
        result.maxVersions_ = value;
        return this;
      }
      public Builder clearMaxVersions() {
        result.hasMaxVersions = false;
        result.maxVersions_ = 0;
        return this;
      }

      // --- filter (field 8, string) ---
      public boolean hasFilter() {
        return result.hasFilter();
      }
      public java.lang.String getFilter() {
        return result.getFilter();
      }
      public Builder setFilter(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasFilter = true;
        result.filter_ = value;
        return this;
      }
      public Builder clearFilter() {
        result.hasFilter = false;
        result.filter_ = getDefaultInstance().getFilter();
        return this;
      }
603
604
605 }
606
    static {
      // Create defaultInstance with the no-init constructor so it exists
      // before the descriptor machinery is touched, force the outer class's
      // static initializer (which builds the descriptors), then run the
      // normal field initialization.
      defaultInstance = new Scanner(true);
      org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.internalForceInit();
      defaultInstance.initFields();
    }
612
613
614 }
615
  // Descriptor and reflection-accessor table for the Scanner message,
  // populated by the static descriptor-building block below.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable;

  // File-level descriptor for ScannerMessage.proto.
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for ScannerMessage.proto, embedded as a
    // string literal by protoc. Do not edit: the bytes must match the .proto.
    java.lang.String[] descriptorData = {
      "\n\024ScannerMessage.proto\022/org.apache.hadoo" +
      "p.hbase.rest.protobuf.generated\"\224\001\n\007Scan" +
      "ner\022\020\n\010startRow\030\001 \001(\014\022\016\n\006endRow\030\002 \001(\014\022\017\n" +
      "\007columns\030\003 \003(\014\022\r\n\005batch\030\004 \001(\005\022\021\n\tstartTi" +
      "me\030\005 \001(\003\022\017\n\007endTime\030\006 \001(\003\022\023\n\013maxVersions" +
      "\030\007 \001(\005\022\016\n\006filter\030\010 \001(\t"
    };
    // Callback invoked once the file descriptor is built: captures the Scanner
    // message descriptor and constructs the FieldAccessorTable that maps proto
    // field names to the generated accessors on Scanner and Scanner.Builder.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Scanner_descriptor,
              new java.lang.String[] { "StartRow", "EndRow", "Columns", "Batch", "StartTime", "EndTime", "MaxVersions", "Filter", },
              org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage.Scanner.Builder.class);
          return null;
        }
      };
    // No dependencies: this .proto imports no other files.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
658
  // Intentionally empty: calling this static method from Scanner's static
  // initializer forces this outer class to load, running the descriptor
  // build above before the default instance is fully initialized.
  public static void internalForceInit() {}
660
661
662 }