// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ColumnSchemaMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

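// Outer wrapper class emitted by protoc for ColumnSchemaMessage.proto; it
// holds the nested ColumnSchema message type and the file's descriptor
// bookkeeping.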
public final class ColumnSchemaMessage {
  private ColumnSchemaMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
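  /**
   * Message describing an HBase column family schema as exposed by the REST
   * gateway: an optional name, repeated name/value Attribute pairs, and
   * optional ttl, maxVersions, and compression settings (fields 1-5).
   */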
  public static final class ColumnSchema extends
      com.google.protobuf.GeneratedMessage {

    private ColumnSchema() {
      initFields();
    }
    private ColumnSchema(boolean noInit) {}

    private static final ColumnSchema defaultInstance;
    public static ColumnSchema getDefaultInstance() {
      return defaultInstance;
    }

    public ColumnSchema getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable;
    }

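    /**
     * Nested message holding a single name/value attribute pair. Both
     * fields are required in the .proto, which is why isInitialized()
     * checks hasName and hasValue.
     */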
    public static final class Attribute extends
        com.google.protobuf.GeneratedMessage {

      private Attribute() {
        initFields();
      }
      private Attribute(boolean noInit) {}

      private static final Attribute defaultInstance;
      public static Attribute getDefaultInstance() {
        return defaultInstance;
      }

      public Attribute getDefaultInstanceForType() {
        return defaultInstance;
      }

      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;
      }

      public static final int NAME_FIELD_NUMBER = 1;
      private boolean hasName;
      private java.lang.String name_ = "";
      public boolean hasName() { return hasName; }
      public java.lang.String getName() { return name_; }

      public static final int VALUE_FIELD_NUMBER = 2;
      private boolean hasValue;
      private java.lang.String value_ = "";
      public boolean hasValue() { return hasValue; }
      public java.lang.String getValue() { return value_; }

      private void initFields() {
      }
      public final boolean isInitialized() {
        if (!hasName) return false;
        if (!hasValue) return false;
        return true;
      }

      public void writeTo(com.google.protobuf.CodedOutputStream output)
          throws java.io.IOException {
        getSerializedSize();
        if (hasName()) {
          output.writeString(1, getName());
        }
        if (hasValue()) {
          output.writeString(2, getValue());
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (hasName()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(1, getName());
        }
        if (hasValue()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(2, getValue());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

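      /**
       * Builder in the older protobuf 2.x generated style: it mutates one
       * backing result instance in place, and build()/buildPartial() hand
       * that instance out and null the reference, so a Builder cannot be
       * reused after build().
       */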
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder> {
        private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute result;

        private Builder() {}

        private static Builder create() {
          Builder builder = new Builder();
          builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
          return builder;
        }

        protected org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute internalGetResult() {
          return result;
        }

        public Builder clear() {
          if (result == null) {
            throw new IllegalStateException(
              "Cannot call clear() after build().");
          }
          result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute();
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(result);
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDescriptor();
        }

        public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance();
        }

        public boolean isInitialized() {
          return result.isInitialized();
        }
        public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute build() {
          if (result != null && !isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return buildPartial();
        }

        private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildParsed()
            throws com.google.protobuf.InvalidProtocolBufferException {
          if (!isInitialized()) {
            throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
          }
          return buildPartial();
        }

        public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute buildPartial() {
          if (result == null) {
            throw new IllegalStateException(
              "build() has already been called on this Builder.");
          }
          org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute returnMe = result;
          result = null;
          return returnMe;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute) {
            return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute other) {
          if (other == org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.getDefaultInstance()) return this;
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasValue()) {
            setValue(other.getValue());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder(
              this.getUnknownFields());
          while (true) {
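            // Each tag encodes (field_number << 3) | wire_type: tag 10 is
            // field 1 (name, length-delimited) and tag 18 is field 2 (value);
            // tag 0 marks end of input.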
            int tag = input.readTag();
            switch (tag) {
              case 0:
                this.setUnknownFields(unknownFields.build());
                return this;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  this.setUnknownFields(unknownFields.build());
                  return this;
                }
                break;
              }
              case 10: {
                setName(input.readString());
                break;
              }
              case 18: {
                setValue(input.readString());
                break;
              }
            }
          }
        }

        public boolean hasName() {
          return result.hasName();
        }
        public java.lang.String getName() {
          return result.getName();
        }
        public Builder setName(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasName = true;
          result.name_ = value;
          return this;
        }
        public Builder clearName() {
          result.hasName = false;
          result.name_ = getDefaultInstance().getName();
          return this;
        }

        public boolean hasValue() {
          return result.hasValue();
        }
        public java.lang.String getValue() {
          return result.getValue();
        }
        public Builder setValue(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasValue = true;
          result.value_ = value;
          return this;
        }
        public Builder clearValue() {
          result.hasValue = false;
          result.value_ = getDefaultInstance().getValue();
          return this;
        }
      }

      static {
        defaultInstance = new Attribute(true);
        org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internalForceInit();
        defaultInstance.initFields();
      }
    }

    public static final int NAME_FIELD_NUMBER = 1;
    private boolean hasName;
    private java.lang.String name_ = "";
    public boolean hasName() { return hasName; }
    public java.lang.String getName() { return name_; }

    public static final int ATTRS_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> attrs_ =
      java.util.Collections.emptyList();
    public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
      return attrs_;
    }
    public int getAttrsCount() { return attrs_.size(); }
    public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
      return attrs_.get(index);
    }

    public static final int TTL_FIELD_NUMBER = 3;
    private boolean hasTtl;
    private int ttl_ = 0;
    public boolean hasTtl() { return hasTtl; }
    public int getTtl() { return ttl_; }

    public static final int MAXVERSIONS_FIELD_NUMBER = 4;
    private boolean hasMaxVersions;
    private int maxVersions_ = 0;
    public boolean hasMaxVersions() { return hasMaxVersions; }
    public int getMaxVersions() { return maxVersions_; }

    public static final int COMPRESSION_FIELD_NUMBER = 5;
    private boolean hasCompression;
    private java.lang.String compression_ = "";
    public boolean hasCompression() { return hasCompression; }
    public java.lang.String getCompression() { return compression_; }

    private void initFields() {
    }
    public final boolean isInitialized() {
      for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
        if (!element.isInitialized()) return false;
      }
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (hasName()) {
        output.writeString(1, getName());
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
        output.writeMessage(2, element);
      }
      if (hasTtl()) {
        output.writeInt32(3, getTtl());
      }
      if (hasMaxVersions()) {
        output.writeInt32(4, getMaxVersions());
      }
      if (hasCompression()) {
        output.writeString(5, getCompression());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (hasName()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(1, getName());
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute element : getAttrsList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, element);
      }
      if (hasTtl()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, getTtl());
      }
      if (hasMaxVersions()) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, getMaxVersions());
      }
      if (hasCompression()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(5, getCompression());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

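    /**
     * Builder for ColumnSchema, following the same single-result pattern as
     * Attribute.Builder above; buildPartial() also freezes the attrs_ list
     * via Collections.unmodifiableList before handing the message out.
     */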
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema result;

      private Builder() {}

      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
        return builder;
      }

      protected org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema internalGetResult() {
        return result;
      }

      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(result);
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDescriptor();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance();
      }

      public boolean isInitialized() {
        return result.isInitialized();
      }
      public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }

      private org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        if (result.attrs_ != java.util.Collections.EMPTY_LIST) {
          result.attrs_ =
            java.util.Collections.unmodifiableList(result.attrs_);
        }
        org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema returnMe = result;
        result = null;
        return returnMe;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema) {
          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema other) {
        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.getDefaultInstance()) return this;
        if (other.hasName()) {
          setName(other.getName());
        }
        if (!other.attrs_.isEmpty()) {
          if (result.attrs_.isEmpty()) {
            result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
          }
          result.attrs_.addAll(other.attrs_);
        }
        if (other.hasTtl()) {
          setTtl(other.getTtl());
        }
        if (other.hasMaxVersions()) {
          setMaxVersions(other.getMaxVersions());
        }
        if (other.hasCompression()) {
          setCompression(other.getCompression());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
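          // Tags follow the same (field_number << 3) | wire_type scheme:
          // 10 = name, 18 = attrs (nested message), 24 = ttl (varint),
          // 32 = maxVersions (varint), 42 = compression.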
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {
              setName(input.readString());
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addAttrs(subBuilder.buildPartial());
              break;
            }
            case 24: {
              setTtl(input.readInt32());
              break;
            }
            case 32: {
              setMaxVersions(input.readInt32());
              break;
            }
            case 42: {
              setCompression(input.readString());
              break;
            }
          }
        }
      }

      public boolean hasName() {
        return result.hasName();
      }
      public java.lang.String getName() {
        return result.getName();
      }
      public Builder setName(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasName = true;
        result.name_ = value;
        return this;
      }
      public Builder clearName() {
        result.hasName = false;
        result.name_ = getDefaultInstance().getName();
        return this;
      }

      public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> getAttrsList() {
        return java.util.Collections.unmodifiableList(result.attrs_);
      }
      public int getAttrsCount() {
        return result.getAttrsCount();
      }
      public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index) {
        return result.getAttrs(index);
      }
      public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.attrs_.set(index, value);
        return this;
      }
      public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
        result.attrs_.set(index, builderForValue.build());
        return this;
      }
      public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.attrs_.isEmpty()) {
          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
        }
        result.attrs_.add(value);
        return this;
      }
      public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder builderForValue) {
        if (result.attrs_.isEmpty()) {
          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
        }
        result.attrs_.add(builderForValue.build());
        return this;
      }
      public Builder addAllAttrs(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> values) {
        if (result.attrs_.isEmpty()) {
          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute>();
        }
        super.addAll(values, result.attrs_);
        return this;
      }
      public Builder clearAttrs() {
        result.attrs_ = java.util.Collections.emptyList();
        return this;
      }

      public boolean hasTtl() {
        return result.hasTtl();
      }
      public int getTtl() {
        return result.getTtl();
      }
      public Builder setTtl(int value) {
        result.hasTtl = true;
        result.ttl_ = value;
        return this;
      }
      public Builder clearTtl() {
        result.hasTtl = false;
        result.ttl_ = 0;
        return this;
      }

      public boolean hasMaxVersions() {
        return result.hasMaxVersions();
      }
      public int getMaxVersions() {
        return result.getMaxVersions();
      }
      public Builder setMaxVersions(int value) {
        result.hasMaxVersions = true;
        result.maxVersions_ = value;
        return this;
      }
      public Builder clearMaxVersions() {
        result.hasMaxVersions = false;
        result.maxVersions_ = 0;
        return this;
      }

      public boolean hasCompression() {
        return result.hasCompression();
      }
      public java.lang.String getCompression() {
        return result.getCompression();
      }
      public Builder setCompression(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasCompression = true;
        result.compression_ = value;
        return this;
      }
      public Builder clearCompression() {
        result.hasCompression = false;
        result.compression_ = getDefaultInstance().getCompression();
        return this;
      }
    }

    static {
      defaultInstance = new ColumnSchema(true);
      org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.internalForceInit();
      defaultInstance.initFields();
    }
  }

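  // Reflection plumbing: descriptors and field accessor tables, populated by
  // the static initializer below once the embedded file descriptor is parsed.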
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
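    // descriptorData holds the serialized FileDescriptorProto for
    // ColumnSchemaMessage.proto, embedded by protoc as an escaped string.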
    java.lang.String[] descriptorData = {
      "\n\031ColumnSchemaMessage.proto\022/org.apache." +
      "hadoop.hbase.rest.protobuf.generated\"\325\001\n" +
      "\014ColumnSchema\022\014\n\004name\030\001 \001(\t\022V\n\005attrs\030\002 \003" +
      "(\0132G.org.apache.hadoop.hbase.rest.protob" +
      "uf.generated.ColumnSchema.Attribute\022\013\n\003t" +
      "tl\030\003 \001(\005\022\023\n\013maxVersions\030\004 \001(\005\022\023\n\013compres" +
      "sion\030\005 \001(\t\032(\n\tAttribute\022\014\n\004name\030\001 \002(\t\022\r\n" +
      "\005value\030\002 \002(\t"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor,
              new java.lang.String[] { "Name", "Attrs", "Ttl", "MaxVersions", "Compression", },
              org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder.class);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor =
            internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_descriptor.getNestedTypes().get(0);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_ColumnSchema_Attribute_descriptor,
              new java.lang.String[] { "Name", "Value", },
              org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  public static void internalForceInit() {}
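
  // Illustrative usage sketch (not part of the generated source; the field
  // values are hypothetical):
  //
  //   ColumnSchemaMessage.ColumnSchema schema =
  //       ColumnSchemaMessage.ColumnSchema.newBuilder()
  //           .setName("info")
  //           .setTtl(86400)
  //           .setMaxVersions(3)
  //           .setCompression("GZ")
  //           .build();
  //   byte[] bytes = schema.toByteArray();
  //   ColumnSchemaMessage.ColumnSchema roundTripped =
  //       ColumnSchemaMessage.ColumnSchema.parseFrom(bytes);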

}