
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: TableSchemaMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class TableSchemaMessage {
  private TableSchemaMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public static final class TableSchema extends
      com.google.protobuf.GeneratedMessage {
    // Use TableSchema.newBuilder() to construct.
    private TableSchema() {
      initFields();
    }
    private TableSchema(boolean noInit) {}

    private static final TableSchema defaultInstance;
    public static TableSchema getDefaultInstance() {
      return defaultInstance;
    }

    public TableSchema getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable;
    }

    public static final class Attribute extends
        com.google.protobuf.GeneratedMessage {
      // Use Attribute.newBuilder() to construct.
      private Attribute() {
        initFields();
      }
      private Attribute(boolean noInit) {}

      private static final Attribute defaultInstance;
      public static Attribute getDefaultInstance() {
        return defaultInstance;
      }

      public Attribute getDefaultInstanceForType() {
        return defaultInstance;
      }

      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;
      }

      // required string name = 1;
      public static final int NAME_FIELD_NUMBER = 1;
      private boolean hasName;
      private java.lang.String name_ = "";
      public boolean hasName() { return hasName; }
      public java.lang.String getName() { return name_; }

      // required string value = 2;
      public static final int VALUE_FIELD_NUMBER = 2;
      private boolean hasValue;
      private java.lang.String value_ = "";
      public boolean hasValue() { return hasValue; }
      public java.lang.String getValue() { return value_; }

      private void initFields() {
      }
      public final boolean isInitialized() {
        if (!hasName) return false;
        if (!hasValue) return false;
        return true;
      }
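
      // Editor's note (not generated code): a minimal usage sketch; the attribute
      // name and value below are illustrative placeholders. Both fields are
      // required, so build() throws if either setter was skipped:
      //
      //   TableSchema.Attribute attr = TableSchema.Attribute.newBuilder()
      //       .setName("COMPRESSION")
      //       .setValue("GZ")
      //       .build();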

      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();
        if (hasName()) {
          output.writeString(1, getName());
        }
        if (hasValue()) {
          output.writeString(2, getValue());
        }
        getUnknownFields().writeTo(output);
      }

      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (hasName()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(1, getName());
        }
        if (hasValue()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(2, getValue());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
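
      // Editor's note (not generated code): a serialize/parse round trip, assuming
      // the protobuf-java 2.x runtime this file was generated against, where
      // toByteString() is inherited from the message base class:
      //
      //   com.google.protobuf.ByteString data = attr.toByteString();
      //   TableSchema.Attribute copy = TableSchema.Attribute.parseFrom(data);
      //   assert copy.getName().equals(attr.getName());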

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder> {
        private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute result;

        // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder()
        private Builder() {}

        private static Builder create() {
          Builder builder = new Builder();
          builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
          return builder;
        }

        protected org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute internalGetResult() {
          return result;
        }

        public Builder clear() {
          if (result == null) {
            throw new IllegalStateException(
              "Cannot call clear() after build().");
          }
          result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute();
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(result);
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDescriptor();
        }

        public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance();
        }

        public boolean isInitialized() {
          return result.isInitialized();
        }
        public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute build() {
          if (result != null && !isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return buildPartial();
        }

        private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildParsed()
            throws com.google.protobuf.InvalidProtocolBufferException {
          if (!isInitialized()) {
            throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
          }
          return buildPartial();
        }

        public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute buildPartial() {
          if (result == null) {
            throw new IllegalStateException(
              "build() has already been called on this Builder.");
          }
          org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute returnMe = result;
          result = null;
          return returnMe;
        }
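
        // Editor's note (not generated code): builders in this generation are
        // single-use. buildPartial() hands out the backing message and nulls the
        // "result" field, so a second build() throws IllegalStateException, and
        // clear() after build() is rejected the same way above.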

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute) {
            return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute other) {
          if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.getDefaultInstance()) return this;
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasValue()) {
            setValue(other.getValue());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder(
              this.getUnknownFields());
          while (true) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                this.setUnknownFields(unknownFields.build());
                return this;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  this.setUnknownFields(unknownFields.build());
                  return this;
                }
                break;
              }
              case 10: {
                setName(input.readString());
                break;
              }
              case 18: {
                setValue(input.readString());
                break;
              }
            }
          }
        }
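
        // Editor's note (not generated code): the case labels above are protobuf
        // wire tags, computed as (field_number << 3) | wire_type. Fields 1 ("name")
        // and 2 ("value") are length-delimited strings (wire type 2), giving
        // (1 << 3) | 2 = 10 and (2 << 3) | 2 = 18; tag 0 means end of input.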

        // required string name = 1;
        public boolean hasName() {
          return result.hasName();
        }
        public java.lang.String getName() {
          return result.getName();
        }
        public Builder setName(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasName = true;
          result.name_ = value;
          return this;
        }
        public Builder clearName() {
          result.hasName = false;
          result.name_ = getDefaultInstance().getName();
          return this;
        }

        // required string value = 2;
        public boolean hasValue() {
          return result.hasValue();
        }
        public java.lang.String getValue() {
          return result.getValue();
        }
        public Builder setValue(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasValue = true;
          result.value_ = value;
          return this;
        }
        public Builder clearValue() {
          result.hasValue = false;
          result.value_ = getDefaultInstance().getValue();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute)
      }

      static {
        defaultInstance = new Attribute(true);
        org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internalForceInit();
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute)
    }

    // optional string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private boolean hasName;
    private java.lang.String name_ = "";
    public boolean hasName() { return hasName; }
    public java.lang.String getName() { return name_; }

    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2;
    public static final int ATTRS_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> attrs_ =
      java.util.Collections.emptyList();
    public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList() {
      return attrs_;
    }
    public int getAttrsCount() { return attrs_.size(); }
    public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index) {
      return attrs_.get(index);
    }

    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema columns = 3;
    public static final int COLUMNS_FIELD_NUMBER = 3;
    private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> columns_ =
      java.util.Collections.emptyList();
    public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList() {
      return columns_;
    }
    public int getColumnsCount() { return columns_.size(); }
    public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index) {
      return columns_.get(index);
    }

    // optional bool inMemory = 4;
    public static final int INMEMORY_FIELD_NUMBER = 4;
    private boolean hasInMemory;
    private boolean inMemory_ = false;
    public boolean hasInMemory() { return hasInMemory; }
    public boolean getInMemory() { return inMemory_; }

    // optional bool readOnly = 5;
    public static final int READONLY_FIELD_NUMBER = 5;
    private boolean hasReadOnly;
    private boolean readOnly_ = false;
    public boolean hasReadOnly() { return hasReadOnly; }
    public boolean getReadOnly() { return readOnly_; }

    private void initFields() {
    }
    public final boolean isInitialized() {
      for (org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
        if (!element.isInitialized()) return false;
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
        if (!element.isInitialized()) return false;
      }
      return true;
    }
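
    // Editor's note (not generated code): a construction sketch; the table name
    // and attribute values are illustrative placeholders, and ColumnSchema comes
    // from the companion generated class ColumnSchemaMessage:
    //
    //   TableSchema schema = TableSchema.newBuilder()
    //       .setName("mytable")
    //       .addAttrs(TableSchema.Attribute.newBuilder()
    //           .setName("VERSIONS").setValue("3").build())
    //       .setInMemory(false)
    //       .build();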

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (hasName()) {
        output.writeString(1, getName());
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
        output.writeMessage(2, element);
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
        output.writeMessage(3, element);
      }
      if (hasInMemory()) {
        output.writeBool(4, getInMemory());
      }
      if (hasReadOnly()) {
        output.writeBool(5, getReadOnly());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (hasName()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(1, getName());
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute element : getAttrsList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, element);
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema element : getColumnsList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, element);
      }
      if (hasInMemory()) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, getInMemory());
      }
      if (hasReadOnly()) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(5, getReadOnly());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
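
    // Editor's note (not generated code): getSerializedSize() memoizes its result
    // in memoizedSerializedSize (-1 means "not yet computed"), which is why
    // writeTo() calls it once up front and can then emit each set field without
    // re-measuring the message.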

    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
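
    // Editor's note (not generated code): a parsing sketch; the stream source is
    // illustrative and error handling is elided. Because buildParsed() enforces
    // isInitialized(), a nested Attribute missing a required field surfaces as
    // InvalidProtocolBufferException:
    //
    //   java.io.InputStream in = new java.io.FileInputStream("schema.pb");
    //   TableSchema schema = TableSchema.parseFrom(in);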

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema result;

      // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.newBuilder()
      private Builder() {}

      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema();
        return builder;
      }

      protected org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema internalGetResult() {
        return result;
      }

      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(result);
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.getDescriptor();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance();
      }

      public boolean isInitialized() {
        return result.isInitialized();
      }
      public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }

      private org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        if (result.attrs_ != java.util.Collections.EMPTY_LIST) {
          result.attrs_ =
            java.util.Collections.unmodifiableList(result.attrs_);
        }
        if (result.columns_ != java.util.Collections.EMPTY_LIST) {
          result.columns_ =
            java.util.Collections.unmodifiableList(result.columns_);
        }
        org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema returnMe = result;
        result = null;
        return returnMe;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema) {
          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema other) {
        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.getDefaultInstance()) return this;
        if (other.hasName()) {
          setName(other.getName());
        }
        if (!other.attrs_.isEmpty()) {
          if (result.attrs_.isEmpty()) {
            result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
          }
          result.attrs_.addAll(other.attrs_);
        }
        if (!other.columns_.isEmpty()) {
          if (result.columns_.isEmpty()) {
            result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
          }
          result.columns_.addAll(other.columns_);
        }
        if (other.hasInMemory()) {
          setInMemory(other.getInMemory());
        }
        if (other.hasReadOnly()) {
          setReadOnly(other.getReadOnly());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {
              setName(input.readString());
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addAttrs(subBuilder.buildPartial());
              break;
            }
            case 26: {
              org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addColumns(subBuilder.buildPartial());
              break;
            }
            case 32: {
              setInMemory(input.readBool());
              break;
            }
            case 40: {
              setReadOnly(input.readBool());
              break;
            }
          }
        }
      }
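
      // Editor's note (not generated code): same tag arithmetic as in Attribute's
      // parser: attrs (field 2) and columns (field 3) are length-delimited
      // sub-messages (wire type 2), giving tags 18 and 26, while the bools
      // inMemory (field 4) and readOnly (field 5) are varints (wire type 0),
      // giving tags 32 and 40.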

      // optional string name = 1;
      public boolean hasName() {
        return result.hasName();
      }
      public java.lang.String getName() {
        return result.getName();
      }
      public Builder setName(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.hasName = true;
        result.name_ = value;
        return this;
      }
      public Builder clearName() {
        result.hasName = false;
        result.name_ = getDefaultInstance().getName();
        return this;
      }

      // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2;
      public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> getAttrsList() {
        return java.util.Collections.unmodifiableList(result.attrs_);
      }
      public int getAttrsCount() {
        return result.getAttrsCount();
      }
      public org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index) {
        return result.getAttrs(index);
      }
      public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.attrs_.set(index, value);
        return this;
      }
      public Builder setAttrs(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder builderForValue) {
        result.attrs_.set(index, builderForValue.build());
        return this;
      }
      public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.attrs_.isEmpty()) {
          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
        }
        result.attrs_.add(value);
        return this;
      }
      public Builder addAttrs(org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder builderForValue) {
        if (result.attrs_.isEmpty()) {
          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
        }
        result.attrs_.add(builderForValue.build());
        return this;
      }
      public Builder addAllAttrs(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> values) {
        if (result.attrs_.isEmpty()) {
          result.attrs_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute>();
        }
        super.addAll(values, result.attrs_);
        return this;
      }
      public Builder clearAttrs() {
        result.attrs_ = java.util.Collections.emptyList();
        return this;
      }

      // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema columns = 3;
      public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> getColumnsList() {
        return java.util.Collections.unmodifiableList(result.columns_);
      }
      public int getColumnsCount() {
        return result.getColumnsCount();
      }
      public org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index) {
        return result.getColumns(index);
      }
      public Builder setColumns(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.columns_.set(index, value);
        return this;
      }
      public Builder setColumns(int index, org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder builderForValue) {
        result.columns_.set(index, builderForValue.build());
        return this;
      }
      public Builder addColumns(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.columns_.isEmpty()) {
          result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
        }
        result.columns_.add(value);
        return this;
      }
      public Builder addColumns(org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Builder builderForValue) {
        if (result.columns_.isEmpty()) {
          result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
        }
        result.columns_.add(builderForValue.build());
        return this;
      }
      public Builder addAllColumns(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> values) {
        if (result.columns_.isEmpty()) {
          result.columns_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema>();
        }
        super.addAll(values, result.columns_);
        return this;
      }
      public Builder clearColumns() {
        result.columns_ = java.util.Collections.emptyList();
        return this;
      }

      // optional bool inMemory = 4;
      public boolean hasInMemory() {
        return result.hasInMemory();
      }
      public boolean getInMemory() {
        return result.getInMemory();
      }
      public Builder setInMemory(boolean value) {
        result.hasInMemory = true;
        result.inMemory_ = value;
        return this;
      }
      public Builder clearInMemory() {
        result.hasInMemory = false;
        result.inMemory_ = false;
        return this;
      }

      // optional bool readOnly = 5;
      public boolean hasReadOnly() {
        return result.hasReadOnly();
      }
      public boolean getReadOnly() {
        return result.getReadOnly();
      }
      public Builder setReadOnly(boolean value) {
        result.hasReadOnly = true;
        result.readOnly_ = value;
        return this;
      }
      public Builder clearReadOnly() {
        result.hasReadOnly = false;
        result.readOnly_ = false;
        return this;
      }

      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema)
    }

    static {
      defaultInstance = new TableSchema(true);
      org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.internalForceInit();
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\030TableSchemaMessage.proto\022/org.apache.h" +
      "adoop.hbase.rest.protobuf.generated\032\031Col" +
      "umnSchemaMessage.proto\"\220\002\n\013TableSchema\022\014" +
      "\n\004name\030\001 \001(\t\022U\n\005attrs\030\002 \003(\0132F.org.apache" +
      ".hadoop.hbase.rest.protobuf.generated.Ta" +
      "bleSchema.Attribute\022N\n\007columns\030\003 \003(\0132=.o" +
      "rg.apache.hadoop.hbase.rest.protobuf.gen" +
      "erated.ColumnSchema\022\020\n\010inMemory\030\004 \001(\010\022\020\n" +
      "\010readOnly\030\005 \001(\010\032(\n\tAttribute\022\014\n\004name\030\001 \002" +
      "(\t\022\r\n\005value\030\002 \002(\t"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor,
              new java.lang.String[] { "Name", "Attrs", "Columns", "InMemory", "ReadOnly", },
              org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Builder.class);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor =
            internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_descriptor.getNestedTypes().get(0);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableSchema_Attribute_descriptor,
              new java.lang.String[] { "Name", "Value", },
              org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.getDescriptor(),
        }, assigner);
  }
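
  // Editor's note (not generated code): descriptorData above is the serialized
  // FileDescriptorProto; reconstructed for reference, the source schema reads:
  //
  //   package org.apache.hadoop.hbase.rest.protobuf.generated;
  //   import "ColumnSchemaMessage.proto";
  //
  //   message TableSchema {
  //     optional string name = 1;
  //     repeated Attribute attrs = 2;
  //     repeated ColumnSchema columns = 3;
  //     optional bool inMemory = 4;
  //     optional bool readOnly = 5;
  //     message Attribute {
  //       required string name = 1;
  //       required string value = 2;
  //     }
  //   }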

  public static void internalForceInit() {}

  // @@protoc_insertion_point(outer_class_scope)
}