View Javadoc

1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: TableInfoMessage.proto
3   
4   package org.apache.hadoop.hbase.rest.protobuf.generated;
5   
6   public final class TableInfoMessage {
  // Private constructor: this class is only a namespace for the nested
  // TableInfo message type and is never instantiated.
  private TableInfoMessage() {}
  // No-op: this .proto file declares no extensions, but protoc still emits
  // the registration hook so callers can treat all generated files uniformly.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
11    public static final class TableInfo extends
12        com.google.protobuf.GeneratedMessage {
13      // Use TableInfo.newBuilder() to construct.
    // Use TableInfo.newBuilder() to construct.
    private TableInfo() {
      initFields();
    }
    // noInit variant: used only to create defaultInstance without running
    // initFields(), avoiding static-initializer ordering problems.
    private TableInfo(boolean noInit) {}
18      
    // Shared immutable default instance; assigned in the class's static block.
    private static final TableInfo defaultInstance;
    public static TableInfo getDefaultInstance() {
      return defaultInstance;
    }

    public TableInfo getDefaultInstanceForType() {
      return defaultInstance;
    }
27      
    // Descriptor and reflection table for this message type; both are
    // static fields initialized by the enclosing TableInfoMessage class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable;
    }
37      
    // One region of a table: name, start/end row keys, numeric id and the
    // server location hosting it.  Generated protobuf 2.x (pre-2.3 style)
    // code: the Builder mutates a Region instance in place and hands it out
    // on build(), invalidating itself (result becomes null).
    public static final class Region extends
        com.google.protobuf.GeneratedMessage {
      // Use Region.newBuilder() to construct.
      private Region() {
        initFields();
      }
      // noInit variant: creates defaultInstance without calling initFields(),
      // keeping static-initializer ordering safe.
      private Region(boolean noInit) {}

      private static final Region defaultInstance;
      public static Region getDefaultInstance() {
        return defaultInstance;
      }

      public Region getDefaultInstanceForType() {
        return defaultInstance;
      }

      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable;
      }

      // required string name = 1;
      public static final int NAME_FIELD_NUMBER = 1;
      private boolean hasName;   // presence bit for 'name'
      private java.lang.String name_ = "";
      public boolean hasName() { return hasName; }
      public java.lang.String getName() { return name_; }

      // optional bytes startKey = 2;
      public static final int STARTKEY_FIELD_NUMBER = 2;
      private boolean hasStartKey;
      private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasStartKey() { return hasStartKey; }
      public com.google.protobuf.ByteString getStartKey() { return startKey_; }

      // optional bytes endKey = 3;
      public static final int ENDKEY_FIELD_NUMBER = 3;
      private boolean hasEndKey;
      private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY;
      public boolean hasEndKey() { return hasEndKey; }
      public com.google.protobuf.ByteString getEndKey() { return endKey_; }

      // optional int64 id = 4;
      public static final int ID_FIELD_NUMBER = 4;
      private boolean hasId;
      private long id_ = 0L;
      public boolean hasId() { return hasId; }
      public long getId() { return id_; }

      // optional string location = 5;
      public static final int LOCATION_FIELD_NUMBER = 5;
      private boolean hasLocation;
      private java.lang.String location_ = "";
      public boolean hasLocation() { return hasLocation; }
      public java.lang.String getLocation() { return location_; }

      private void initFields() {
      }
      // A Region is valid only when the required 'name' field is present.
      public final boolean isInitialized() {
        if (!hasName) return false;
        return true;
      }

      // Writes set fields in field-number order, then any unknown fields.
      public void writeTo(com.google.protobuf.CodedOutputStream output)
                          throws java.io.IOException {
        getSerializedSize();  // ensure the memoized size is computed first
        if (hasName()) {
          output.writeString(1, getName());
        }
        if (hasStartKey()) {
          output.writeBytes(2, getStartKey());
        }
        if (hasEndKey()) {
          output.writeBytes(3, getEndKey());
        }
        if (hasId()) {
          output.writeInt64(4, getId());
        }
        if (hasLocation()) {
          output.writeString(5, getLocation());
        }
        getUnknownFields().writeTo(output);
      }

      // Cached wire size; -1 means "not yet computed".  NOTE(review): the
      // cache is not synchronized, but recomputation is idempotent.
      private int memoizedSerializedSize = -1;
      public int getSerializedSize() {
        int size = memoizedSerializedSize;
        if (size != -1) return size;

        size = 0;
        if (hasName()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(1, getName());
        }
        if (hasStartKey()) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(2, getStartKey());
        }
        if (hasEndKey()) {
          size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, getEndKey());
        }
        if (hasId()) {
          size += com.google.protobuf.CodedOutputStream
            .computeInt64Size(4, getId());
        }
        if (hasLocation()) {
          size += com.google.protobuf.CodedOutputStream
            .computeStringSize(5, getLocation());
        }
        size += getUnknownFields().getSerializedSize();
        memoizedSerializedSize = size;
        return size;
      }

      // Static parse helpers: each delegates to a fresh Builder and checks
      // required fields via buildParsed() (throws InvalidProtocolBufferException
      // if 'name' is missing).
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return newBuilder().mergeFrom(data, extensionRegistry)
                 .buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }
      // Delimited variants return null on a clean end-of-stream (no message).
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        Builder builder = newBuilder();
        if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
          return builder.buildParsed();
        } else {
          return null;
        }
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input).buildParsed();
      }
      public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return newBuilder().mergeFrom(input, extensionRegistry)
                 .buildParsed();
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      // Pre-protobuf-2.3 builder: wraps a mutable Region ('result') that it
      // mutates directly; build()/buildPartial() releases it and sets result
      // to null, after which further use throws IllegalStateException.
      public static final class Builder extends
          com.google.protobuf.GeneratedMessage.Builder<Builder> {
        private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region result;

        // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder()
        private Builder() {}

        private static Builder create() {
          Builder builder = new Builder();
          builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region();
          return builder;
        }

        protected org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region internalGetResult() {
          return result;
        }

        public Builder clear() {
          if (result == null) {
            throw new IllegalStateException(
              "Cannot call clear() after build().");
          }
          result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region();
          return this;
        }

        public Builder clone() {
          return create().mergeFrom(result);
        }

        public com.google.protobuf.Descriptors.Descriptor
            getDescriptorForType() {
          return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.getDescriptor();
        }

        public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getDefaultInstanceForType() {
          return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance();
        }

        public boolean isInitialized() {
          return result.isInitialized();
        }
        // Throws UninitializedMessageException if required 'name' is unset.
        public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region build() {
          if (result != null && !isInitialized()) {
            throw newUninitializedMessageException(result);
          }
          return buildPartial();
        }

        // Like build() but reports missing required fields as a parse error.
        private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region buildParsed()
            throws com.google.protobuf.InvalidProtocolBufferException {
          if (!isInitialized()) {
            throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
          }
          return buildPartial();
        }

        // Releases the wrapped message; the builder is unusable afterwards.
        public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region buildPartial() {
          if (result == null) {
            throw new IllegalStateException(
              "build() has already been called on this Builder.");
          }
          org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region returnMe = result;
          result = null;
          return returnMe;
        }

        public Builder mergeFrom(com.google.protobuf.Message other) {
          if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region) {
            return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region)other);
          } else {
            super.mergeFrom(other);
            return this;
          }
        }

        // Field-by-field merge: set fields in 'other' overwrite this builder's.
        public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region other) {
          if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.getDefaultInstance()) return this;
          if (other.hasName()) {
            setName(other.getName());
          }
          if (other.hasStartKey()) {
            setStartKey(other.getStartKey());
          }
          if (other.hasEndKey()) {
            setEndKey(other.getEndKey());
          }
          if (other.hasId()) {
            setId(other.getId());
          }
          if (other.hasLocation()) {
            setLocation(other.getLocation());
          }
          this.mergeUnknownFields(other.getUnknownFields());
          return this;
        }

        // Wire-format parse loop: tag 0 means end of message; unrecognized
        // tags are preserved in unknownFields.  The 'default' label before
        // the numbered cases is legal Java — default only matches when no
        // case does, regardless of position.
        public Builder mergeFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws java.io.IOException {
          com.google.protobuf.UnknownFieldSet.Builder unknownFields =
            com.google.protobuf.UnknownFieldSet.newBuilder(
              this.getUnknownFields());
          while (true) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                this.setUnknownFields(unknownFields.build());
                return this;
              default: {
                if (!parseUnknownField(input, unknownFields,
                                       extensionRegistry, tag)) {
                  this.setUnknownFields(unknownFields.build());
                  return this;
                }
                break;
              }
              case 10: {  // field 1, wire type 2 (length-delimited): name
                setName(input.readString());
                break;
              }
              case 18: {  // field 2: startKey
                setStartKey(input.readBytes());
                break;
              }
              case 26: {  // field 3: endKey
                setEndKey(input.readBytes());
                break;
              }
              case 32: {  // field 4, wire type 0 (varint): id
                setId(input.readInt64());
                break;
              }
              case 42: {  // field 5: location
                setLocation(input.readString());
                break;
              }
            }
          }
        }


        // required string name = 1;
        public boolean hasName() {
          return result.hasName();
        }
        public java.lang.String getName() {
          return result.getName();
        }
        public Builder setName(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasName = true;
          result.name_ = value;
          return this;
        }
        public Builder clearName() {
          result.hasName = false;
          result.name_ = getDefaultInstance().getName();
          return this;
        }

        // optional bytes startKey = 2;
        public boolean hasStartKey() {
          return result.hasStartKey();
        }
        public com.google.protobuf.ByteString getStartKey() {
          return result.getStartKey();
        }
        public Builder setStartKey(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasStartKey = true;
          result.startKey_ = value;
          return this;
        }
        public Builder clearStartKey() {
          result.hasStartKey = false;
          result.startKey_ = getDefaultInstance().getStartKey();
          return this;
        }

        // optional bytes endKey = 3;
        public boolean hasEndKey() {
          return result.hasEndKey();
        }
        public com.google.protobuf.ByteString getEndKey() {
          return result.getEndKey();
        }
        public Builder setEndKey(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasEndKey = true;
          result.endKey_ = value;
          return this;
        }
        public Builder clearEndKey() {
          result.hasEndKey = false;
          result.endKey_ = getDefaultInstance().getEndKey();
          return this;
        }

        // optional int64 id = 4;
        public boolean hasId() {
          return result.hasId();
        }
        public long getId() {
          return result.getId();
        }
        public Builder setId(long value) {
          result.hasId = true;
          result.id_ = value;
          return this;
        }
        public Builder clearId() {
          result.hasId = false;
          result.id_ = 0L;
          return this;
        }

        // optional string location = 5;
        public boolean hasLocation() {
          return result.hasLocation();
        }
        public java.lang.String getLocation() {
          return result.getLocation();
        }
        public Builder setLocation(java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          result.hasLocation = true;
          result.location_ = value;
          return this;
        }
        public Builder clearLocation() {
          result.hasLocation = false;
          result.location_ = getDefaultInstance().getLocation();
          return this;
        }

        // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region)
      }

      // Forces the outer class's descriptors to initialize before the
      // default instance can be observed by callers.
      static {
        defaultInstance = new Region(true);
        org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internalForceInit();
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region)
    }
490     
    // required string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private boolean hasName;   // presence bit for 'name'
    private java.lang.String name_ = "";
    public boolean hasName() { return hasName; }
    public java.lang.String getName() { return name_; }

    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2;
    public static final int REGIONS_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> regions_ =
      java.util.Collections.emptyList();
    // Returns the backing list itself; Builder.buildPartial() wraps it
    // unmodifiable before the message is handed out.
    public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList() {
      return regions_;
    }
    public int getRegionsCount() { return regions_.size(); }
    public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index) {
      return regions_.get(index);
    }
509     
510     private void initFields() {
511     }
512     public final boolean isInitialized() {
513       if (!hasName) return false;
514       for (org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
515         if (!element.isInitialized()) return false;
516       }
517       return true;
518     }
519     
    // Serializes set fields in field-number order (name, then each region),
    // followed by any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the memoized size is computed first
      if (hasName()) {
        output.writeString(1, getName());
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
        output.writeMessage(2, element);
      }
      getUnknownFields().writeTo(output);
    }
531     
    // Cached wire size; -1 means "not yet computed".  NOTE(review): the cache
    // is unsynchronized, but recomputation is idempotent so races are benign.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (hasName()) {
        size += com.google.protobuf.CodedOutputStream
          .computeStringSize(1, getName());
      }
      for (org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region element : getRegionsList()) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, element);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
550     
    // Static parse helpers: each delegates to a fresh Builder and validates
    // required fields via buildParsed() (throws InvalidProtocolBufferException
    // when 'name' is missing).
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null on a clean end-of-stream (no message).
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
617     
    // Builder factories; newBuilder(prototype) starts from a copy of the
    // given message's set fields.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
624     
625     public static final class Builder extends
626         com.google.protobuf.GeneratedMessage.Builder<Builder> {
      // The mutable message being assembled; null once build()/buildPartial()
      // has released it (pre-protobuf-2.3 builder style).
      private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo result;

      // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.newBuilder()
      private Builder() {}

      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo();
        return builder;
      }

      protected org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo internalGetResult() {
        return result;
      }

      // Resets to a fresh message; illegal after build() has been called.
      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(result);
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.getDescriptor();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance();
      }
663       
      public boolean isInitialized() {
        return result.isInitialized();
      }
      // Throws UninitializedMessageException if required fields are unset.
      public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }

      // Like build() but reports missing required fields as a parse error.
      private org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }

      // Freezes the regions list and releases the wrapped message; the
      // builder is unusable afterwards (result becomes null).  The identity
      // check works because Collections.emptyList() returns the shared
      // EMPTY_LIST instance, so only a builder-created ArrayList is wrapped.
      public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        if (result.regions_ != java.util.Collections.EMPTY_LIST) {
          result.regions_ =
            java.util.Collections.unmodifiableList(result.regions_);
        }
        org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo returnMe = result;
        result = null;
        return returnMe;
      }
696       
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo) {
          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: 'name' is overwritten if set in 'other';
      // repeated 'regions' are appended, swapping the shared empty list for
      // a private ArrayList on first append.
      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo other) {
        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.getDefaultInstance()) return this;
        if (other.hasName()) {
          setName(other.getName());
        }
        if (!other.regions_.isEmpty()) {
          if (result.regions_.isEmpty()) {
            result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
          }
          result.regions_.addAll(other.regions_);
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
720       
      // Wire-format parse loop: tag 0 means end of message; unrecognized tags
      // are preserved in unknownFields.  The 'default' label before the
      // numbered cases is legal Java — default only matches when no case does,
      // regardless of its position in the switch.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {  // field 1, wire type 2 (length-delimited): name
              setName(input.readString());
              break;
            }
            case 18: {  // field 2: one Region submessage, appended
              org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder subBuilder = org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addRegions(subBuilder.buildPartial());
              break;
            }
          }
        }
      }
755       
756       
757       // required string name = 1;
758       public boolean hasName() {
          // Delegates to the message instance under construction (`result`).
759         return result.hasName();
760       }
761       public java.lang.String getName() {
          // Reads straight from the in-progress message; no copy needed for an
          // immutable String.
762         return result.getName();
763       }
764       public Builder setName(java.lang.String value) {
          // `name` is a required proto field: reject null explicitly rather
          // than storing it and failing later at serialization time.
765         if (value == null) {
766     throw new NullPointerException();
767   }
          // Generated builder pattern: mutate the in-progress message's
          // has-bit and field storage directly.
768   result.hasName = true;
769         result.name_ = value;
770         return this;
771       }
772       public Builder clearName() {
773         result.hasName = false;
          // Reset storage to whatever the shared default instance carries for
          // name, so cleared state matches a freshly-built message.
774         result.name_ = getDefaultInstance().getName();
775         return this;
776       }
777       
778       // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2;
779       public java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> getRegionsList() {
          // Wrap in an unmodifiable view so callers cannot mutate the
          // builder's backing list behind its back.
780         return java.util.Collections.unmodifiableList(result.regions_);
781       }
782       public int getRegionsCount() {
          // Delegates to the message under construction.
783         return result.getRegionsCount();
784       }
785       public org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index) {
          // Delegates to the message under construction; index bounds are
          // checked by the underlying list access.
786         return result.getRegions(index);
787       }
788       public Builder setRegions(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region value) {
          // Null elements are never legal in a repeated proto field.
789         if (value == null) {
790           throw new NullPointerException();
791         }
          // Unlike addRegions there is no lazy list swap here: this overload is
          // only valid when `index` refers to an element that already exists.
792         result.regions_.set(index, value);
793         return this;
794       }
795       public Builder setRegions(int index, org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder builderForValue) {
          // Convenience overload: builds the sub-message first, then replaces
          // the existing element at `index` (element must already exist).
796         result.regions_.set(index, builderForValue.build());
797         return this;
798       }
799       public Builder addRegions(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region value) {
          // Null elements are never legal in a repeated proto field.
800         if (value == null) {
801           throw new NullPointerException();
802         }
          // Lazily replace the immutable shared empty list (installed by
          // clearRegions/initial state) with a mutable ArrayList on first add.
803         if (result.regions_.isEmpty()) {
804           result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
805         }
806         result.regions_.add(value);
807         return this;
808       }
809       public Builder addRegions(org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder builderForValue) {
          // Builder-taking convenience overload; same lazy swap of the
          // immutable empty list to a mutable ArrayList before the first add.
810         if (result.regions_.isEmpty()) {
811           result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
812         }
813         result.regions_.add(builderForValue.build());
814         return this;
815       }
816       public Builder addAllRegions(
817           java.lang.Iterable<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> values) {
          // Same lazy swap as addRegions; then copy every element from
          // `values` into the backing list via the superclass bulk helper.
818         if (result.regions_.isEmpty()) {
819           result.regions_ = new java.util.ArrayList<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region>();
820         }
821         super.addAll(values, result.regions_);
822         return this;
823       }
824       public Builder clearRegions() {
          // Drop all elements by re-installing the immutable shared empty
          // list; the next add* call swaps in a fresh mutable ArrayList.
825         result.regions_ = java.util.Collections.emptyList();
826         return this;
827       }
828       
829       // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo)
830     }
831     
832     static {
        // Construct the shared default instance via the no-init constructor,
        // force the outer class's static initializer (so descriptors are
        // assigned), then initialize the default instance's fields.
833       defaultInstance = new TableInfo(true);
834       org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internalForceInit();
835       defaultInstance.initFields();
836     }
837     
838     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo)
839   }
840   
  // Descriptor handles and reflection accessor tables for TableInfo and its
  // nested Region message; assigned exactly once by this class's static
  // initializer when the file descriptor is built.
841   private static com.google.protobuf.Descriptors.Descriptor
842     internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
843   private static
844     com.google.protobuf.GeneratedMessage.FieldAccessorTable
845       internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable;
846   private static com.google.protobuf.Descriptors.Descriptor
847     internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor;
848   private static
849     com.google.protobuf.GeneratedMessage.FieldAccessorTable
850       internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable;
851   
852   public static com.google.protobuf.Descriptors.FileDescriptor
853       getDescriptor() {
        // File-level descriptor for TableInfoMessage.proto, built by the
        // static initializer below.
854     return descriptor;
855   }
  // Holds the parsed file descriptor; written once from the assigner callback
  // in the static initializer.
856   private static com.google.protobuf.Descriptors.FileDescriptor
857       descriptor;
858   static {
      // descriptorData is the serialized FileDescriptorProto for
      // TableInfoMessage.proto, embedded as an escaped string: message
      // TableInfo { required string name = 1; repeated Region regions = 2; }
      // with nested Region { name, startKey, endKey, id, location }.
859     java.lang.String[] descriptorData = {
860       "\n\026TableInfoMessage.proto\022/org.apache.had" +
861       "oop.hbase.rest.protobuf.generated\"\305\001\n\tTa" +
862       "bleInfo\022\014\n\004name\030\001 \002(\t\022R\n\007regions\030\002 \003(\0132A" +
863       ".org.apache.hadoop.hbase.rest.protobuf.g" +
864       "enerated.TableInfo.Region\032V\n\006Region\022\014\n\004n" +
865       "ame\030\001 \002(\t\022\020\n\010startKey\030\002 \001(\014\022\016\n\006endKey\030\003 " +
866       "\001(\014\022\n\n\002id\030\004 \001(\003\022\020\n\010location\030\005 \001(\t"
867     };
      // Callback invoked once the file descriptor is built: captures the root
      // descriptor and wires up the per-message descriptors and reflection
      // field accessor tables declared above.
868     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
869       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
870         public com.google.protobuf.ExtensionRegistry assignDescriptors(
871             com.google.protobuf.Descriptors.FileDescriptor root) {
872           descriptor = root;
              // TableInfo is message type 0 of this file.
873           internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor =
874             getDescriptor().getMessageTypes().get(0);
875           internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_fieldAccessorTable = new
876             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
877               internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor,
878               new java.lang.String[] { "Name", "Regions", },
879               org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.class,
880               org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Builder.class);
              // Region is nested type 0 inside TableInfo.
881           internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor =
882             internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor.getNestedTypes().get(0);
883           internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_fieldAccessorTable = new
884             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
885               internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_Region_descriptor,
886               new java.lang.String[] { "Name", "StartKey", "EndKey", "Id", "Location", },
887               org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.class,
888               org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region.Builder.class);
              // Null: this file defines no extensions to register.
889           return null;
890         }
891       };
      // No dependencies: this .proto imports no other files.
892     com.google.protobuf.Descriptors.FileDescriptor
893       .internalBuildGeneratedFileFrom(descriptorData,
894         new com.google.protobuf.Descriptors.FileDescriptor[] {
895         }, assigner);
896   }
897   
  // Intentionally empty: calling this merely forces this class's static
  // initializer to run, ensuring descriptors are built before use.
898   public static void internalForceInit() {}
899   
900   // @@protoc_insertion_point(outer_class_scope)
901 }