View Javadoc

1   // Generated by the protocol buffer compiler.  DO NOT EDIT!
2   // source: hbase.proto
3   
4   package org.apache.hadoop.hbase.protobuf.generated;
5   
6   public final class HBaseProtos {
  // Private constructor: HBaseProtos is a static holder class and must not be instantiated.
  private HBaseProtos() {}
  /**
   * Registers all proto extensions declared in hbase.proto with the given registry.
   * No extensions are declared, so this is intentionally a no-op.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Read-only accessor interface for {@code SnapshotDescription}, implemented by
   * both the immutable message and its Builder. Each {@code hasX()} reports
   * whether field X was explicitly set; {@code getX()} returns the value
   * (or the field's default when unset).
   */
  public interface SnapshotDescriptionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string name = 1;
    boolean hasName();
    String getName();

    // optional string table = 2;
    boolean hasTable();
    String getTable();

    // optional int64 creationTime = 3 [default = 0];
    boolean hasCreationTime();
    long getCreationTime();

    // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
    boolean hasType();
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType();

    // optional int32 version = 5;
    boolean hasVersion();
    int getVersion();
  }
  /**
   * Protobuf message {@code SnapshotDescription}: name, source table,
   * creation time, type, and format version of a snapshot.
   * Immutable; obtain instances via {@code newBuilder()} or {@code parseFrom(...)}.
   */
  public static final class SnapshotDescription extends
      com.google.protobuf.GeneratedMessage
      implements SnapshotDescriptionOrBuilder {
    // Use SnapshotDescription.newBuilder() to construct.
    private SnapshotDescription(Builder builder) {
      super(builder);
    }
    // Used only to create the singleton default instance; fields are populated
    // afterwards by initFields() in the static initializer at the end of the class.
    private SnapshotDescription(boolean noInit) {}

    // Shared immutable default instance (all fields at their defaults).
    private static final SnapshotDescription defaultInstance;
    public static SnapshotDescription getDefaultInstance() {
      return defaultInstance;
    }

    public SnapshotDescription getDefaultInstanceForType() {
      return defaultInstance;
    }
51      
    // Message descriptor; the static descriptor objects live in the outer HBaseProtos class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor;
    }

    // Reflection support: maps descriptor fields onto the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable;
    }
61      
    /**
     * Protobuf enum {@code SnapshotDescription.Type}. Each constant carries its
     * descriptor index and its wire value (here they coincide: DISABLED=0, FLUSH=1).
     */
    public enum Type
        implements com.google.protobuf.ProtocolMessageEnum {
      DISABLED(0, 0),
      FLUSH(1, 1),
      ;

      // Wire values, usable in switch statements and serialized data.
      public static final int DISABLED_VALUE = 0;
      public static final int FLUSH_VALUE = 1;


      public final int getNumber() { return value; }

      // Maps a wire value to its constant; returns null for unrecognized values
      // (callers treat null as "preserve as unknown field").
      public static Type valueOf(int value) {
        switch (value) {
          case 0: return DISABLED;
          case 1: return FLUSH;
          default: return null;
        }
      }

      public static com.google.protobuf.Internal.EnumLiteMap<Type>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<Type>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<Type>() {
              public Type findValueByNumber(int number) {
                return Type.valueOf(number);
              }
            };

      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      // This enum is the first (index 0) enum type declared inside SnapshotDescription.
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0);
      }

      // Constants in descriptor-index order, for descriptor-based lookup below.
      private static final Type[] VALUES = {
        DISABLED, FLUSH,
      };

      // Descriptor-based lookup; rejects descriptors belonging to other enum types.
      public static Type valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      // index = position in the descriptor; value = wire number.
      private final int index;
      private final int value;

      private Type(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:SnapshotDescription.Type)
    }
130     
    // Presence bits: one bit per field (0x1=name, 0x2=table, 0x4=creationTime,
    // 0x8=type, 0x10=version).
    private int bitField0_;
    // required string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; decoded lazily by getName().
    private java.lang.Object name_;
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so a
        // malformed ByteString keeps round-tripping its original bytes.
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          name_ = s;
        }
        return s;
      }
    }
    // Byte-level accessor used by serialization; caches the UTF-8 encoding.
    private com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
163     
    // optional string table = 2;
    public static final int TABLE_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString; decoded lazily by getTable().
    private java.lang.Object table_;
    public boolean hasTable() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public String getTable() {
      java.lang.Object ref = table_;
      if (ref instanceof String) {
        return (String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        String s = bs.toStringUtf8();
        // Cache the decoded String only for valid UTF-8 (same policy as getName()).
        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          table_ = s;
        }
        return s;
      }
    }
    // Byte-level accessor used by serialization; caches the UTF-8 encoding.
    private com.google.protobuf.ByteString getTableBytes() {
      java.lang.Object ref = table_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
        table_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
195     
    // optional int64 creationTime = 3 [default = 0];
    public static final int CREATIONTIME_FIELD_NUMBER = 3;
    private long creationTime_;
    public boolean hasCreationTime() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    // Returns 0 when unset (the declared default).
    public long getCreationTime() {
      return creationTime_;
    }
205     
    // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
    public static final int TYPE_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_;
    public boolean hasType() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    // Returns Type.FLUSH when unset (the declared default, see initFields()).
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() {
      return type_;
    }
215     
    // optional int32 version = 5;
    public static final int VERSION_FIELD_NUMBER = 5;
    private int version_;
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    // Returns 0 when unset.
    public int getVersion() {
      return version_;
    }
225     
    // Resets every field to its proto-declared default; called on the
    // default instance from the static initializer.
    private void initFields() {
      name_ = "";
      table_ = "";
      creationTime_ = 0L;
      type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
      version_ = 0;
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'name' is the only required field; all others are optional.
      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
245     
    /**
     * Serializes the set fields (tested via bitField0_) in field-number order,
     * followed by any unknown fields carried over from parsing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the memoized size is computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getTableBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt64(3, creationTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeEnum(4, type_.getNumber());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeInt32(5, version_);
      }
      getUnknownFields().writeTo(output);
    }
266     
    // Memoized wire size: -1 until first computed (safe because the message is immutable).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum the encoded size of each set field, mirroring writeTo() exactly.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getTableBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(3, creationTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(4, type_.getNumber());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(5, version_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
297     
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
304     
    /**
     * Field-by-field equality: for each field, presence bits must match and,
     * when present, values must be equal; unknown fields must match too.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) obj;

      boolean result = true;
      result = result && (hasName() == other.hasName());
      if (hasName()) {
        result = result && getName()
            .equals(other.getName());
      }
      result = result && (hasTable() == other.hasTable());
      if (hasTable()) {
        result = result && getTable()
            .equals(other.getTable());
      }
      result = result && (hasCreationTime() == other.hasCreationTime());
      if (hasCreationTime()) {
        result = result && (getCreationTime()
            == other.getCreationTime());
      }
      result = result && (hasType() == other.hasType());
      if (hasType()) {
        result = result &&
            (getType() == other.getType());
      }
      result = result && (hasVersion() == other.hasVersion());
      if (hasVersion()) {
        result = result && (getVersion()
            == other.getVersion());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
345     
    /**
     * Hash over the descriptor plus each present field (tagged with its field
     * number) and the unknown fields; consistent with equals().
     * hashLong/hashEnum are inherited helpers from the protobuf runtime.
     */
    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasTable()) {
        hash = (37 * hash) + TABLE_FIELD_NUMBER;
        hash = (53 * hash) + getTable().hashCode();
      }
      if (hasCreationTime()) {
        hash = (37 * hash) + CREATIONTIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCreationTime());
      }
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getType());
      }
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getVersion();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }
373     
    // -----------------------------------------------------------------------
    // Static parsing entry points. Each delegates to a fresh Builder and
    // buildParsed(), which converts a missing required field ('name') into an
    // InvalidProtocolBufferException. The delimited variants return null on
    // clean end-of-stream.
    // -----------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        // Stream was already at EOF: no message to return.
        return null;
      }
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
440     
    // Builder factories: fresh builder, builder seeded from a prototype,
    // and toBuilder() for copy-and-modify of this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook: creates a builder attached to a parent for change notifications.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Mutable builder for {@code SnapshotDescription}. Tracks field presence in
     * its own bitField0_ and produces immutable messages via build()/buildPartial().
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable;
      }
466       
      // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-attached constructor used by the runtime for nested-builder change propagation.
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly initializes sub-field builders when the runtime requires it;
      // this message has no sub-messages, so the body is empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
483       
      // Resets every field to its proto default and clears its presence bit.
      public Builder clear() {
        super.clear();
        name_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        table_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        creationTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
        bitField0_ = (bitField0_ & ~0x00000008);
        version_ = 0;
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
498       
      // Deep copy via a partially-built snapshot of the current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor();
      }

      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
      }
511       
      // Builds the message, throwing UninitializedMessageException if 'name' is unset.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription build() {
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Parse-time variant of build(): reports a missing required field as an
      // InvalidProtocolBufferException instead of an unchecked exception.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
529       
      // Copies builder state into a new message without checking required
      // fields; presence bits are transferred one field at a time.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.name_ = name_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.table_ = table_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.creationTime_ = creationTime_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.type_ = type_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.version_ = version_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
558       
      // Typed dispatch: use the fast field-wise merge for SnapshotDescription,
      // otherwise fall back to the reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields set on 'other' overwrite this builder;
      // merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this;
        if (other.hasName()) {
          setName(other.getName());
        }
        if (other.hasTable()) {
          setTable(other.getTable());
        }
        if (other.hasCreationTime()) {
          setCreationTime(other.getCreationTime());
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
588       
      // Builder-side required-field check: only 'name' is required.
      public final boolean isInitialized() {
        if (!hasName()) {

          return false;
        }
        return true;
      }
596       
      /**
       * Streaming parse loop: reads tags until end-of-stream (tag 0), storing
       * recognized fields directly and accumulating everything else (including
       * unrecognized enum values) into the unknown-field set.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          // Case order (default before the field cases) is harmless: every arm
          // ends in break/return, so there is no fall-through.
          switch (tag) {
            case 0:
              // End of input: commit accumulated unknown fields and finish.
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              // Field 1 (name), length-delimited; kept as ByteString, decoded lazily.
              bitField0_ |= 0x00000001;
              name_ = input.readBytes();
              break;
            }
            case 18: {
              // Field 2 (table), length-delimited.
              bitField0_ |= 0x00000002;
              table_ = input.readBytes();
              break;
            }
            case 24: {
              // Field 3 (creationTime), varint.
              bitField0_ |= 0x00000004;
              creationTime_ = input.readInt64();
              break;
            }
            case 32: {
              // Field 4 (type): unrecognized enum numbers are preserved as unknown fields.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(4, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                type_ = value;
              }
              break;
            }
            case 40: {
              // Field 5 (version), varint.
              bitField0_ |= 0x00000010;
              version_ = input.readInt32();
              break;
            }
          }
        }
      }
654       
      // Builder-side presence bits, same layout as the message's bitField0_.
      private int bitField0_;

      // required string name = 1;
      // Holds either a String or a ByteString (when set directly from the wire).
      private java.lang.Object name_ = "";
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof String)) {
          // Builder variant caches unconditionally (no UTF-8 validity check).
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          name_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setName(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default ("").
      public Builder clearName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        name_ = getDefaultInstance().getName();
        onChanged();
        return this;
      }
      // Package-private fast path used by the parser: stores raw bytes, no null check.
      void setName(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000001;
        name_ = value;
        onChanged();
      }
692       
      // optional string table = 2;
      // Holds either a String or a ByteString (when set directly from the wire).
      private java.lang.Object table_ = "";
      public boolean hasTable() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public String getTable() {
        java.lang.Object ref = table_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          table_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      public Builder setTable(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        table_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default ("").
      public Builder clearTable() {
        bitField0_ = (bitField0_ & ~0x00000002);
        table_ = getDefaultInstance().getTable();
        onChanged();
        return this;
      }
      // Package-private fast path used by the parser: stores raw bytes, no null check.
      void setTable(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        table_ = value;
        onChanged();
      }
728       
      // optional int64 creationTime = 3 [default = 0];
      private long creationTime_ ;
      public boolean hasCreationTime() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      public long getCreationTime() {
        return creationTime_;
      }
      public Builder setCreationTime(long value) {
        bitField0_ |= 0x00000004;
        creationTime_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default (0).
      public Builder clearCreationTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        creationTime_ = 0L;
        onChanged();
        return this;
      }
749       
      // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
      public boolean hasType() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() {
        return type_;
      }
      public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        type_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default (FLUSH).
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000008);
        type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH;
        onChanged();
        return this;
      }
773       
      // optional int32 version = 5;
      private int version_ ;
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      public int getVersion() {
        return version_;
      }
      public Builder setVersion(int value) {
        bitField0_ |= 0x00000010;
        version_ = value;
        onChanged();
        return this;
      }
      // Clears the presence bit and restores the default (0).
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000010);
        version_ = 0;
        onChanged();
        return this;
      }
794       
795       // @@protoc_insertion_point(builder_scope:SnapshotDescription)
796     }
797     
    // Create the default instance via the no-op constructor, then populate its
    // fields with the proto-declared defaults.
    static {
      defaultInstance = new SnapshotDescription(true);
      defaultInstance.initFields();
    }
802     
803     // @@protoc_insertion_point(class_scope:SnapshotDescription)
804   }
805   
  /**
   * Read-only accessor interface for {@code RegionServerInfo}, implemented by
   * both the immutable message and its Builder.
   */
  public interface RegionServerInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int32 infoPort = 1;
    boolean hasInfoPort();
    int getInfoPort();
  }
  /**
   * Protobuf message {@code RegionServerInfo}: carries a region server's
   * optional info port. NOTE(review): the class continues beyond this view;
   * only the members shown here are documented.
   */
  public static final class RegionServerInfo extends
      com.google.protobuf.GeneratedMessage
      implements RegionServerInfoOrBuilder {
    // Use RegionServerInfo.newBuilder() to construct.
    private RegionServerInfo(Builder builder) {
      super(builder);
    }
    // Used only to create the singleton default instance; fields are populated
    // afterwards via initFields() (presumably in a static block outside this view).
    private RegionServerInfo(boolean noInit) {}

    // Shared immutable default instance.
    private static final RegionServerInfo defaultInstance;
    public static RegionServerInfo getDefaultInstance() {
      return defaultInstance;
    }

    public RegionServerInfo getDefaultInstanceForType() {
      return defaultInstance;
    }
830     
831     public static final com.google.protobuf.Descriptors.Descriptor
832         getDescriptor() {
833       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
834     }
835     
836     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
837         internalGetFieldAccessorTable() {
838       return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_fieldAccessorTable;
839     }
840     
841     private int bitField0_;
842     // optional int32 infoPort = 1;
843     public static final int INFOPORT_FIELD_NUMBER = 1;
844     private int infoPort_;
845     public boolean hasInfoPort() {
846       return ((bitField0_ & 0x00000001) == 0x00000001);
847     }
848     public int getInfoPort() {
849       return infoPort_;
850     }
851     
852     private void initFields() {
853       infoPort_ = 0;
854     }
855     private byte memoizedIsInitialized = -1;
856     public final boolean isInitialized() {
857       byte isInitialized = memoizedIsInitialized;
858       if (isInitialized != -1) return isInitialized == 1;
859       
860       memoizedIsInitialized = 1;
861       return true;
862     }
863     
864     public void writeTo(com.google.protobuf.CodedOutputStream output)
865                         throws java.io.IOException {
866       getSerializedSize();
867       if (((bitField0_ & 0x00000001) == 0x00000001)) {
868         output.writeInt32(1, infoPort_);
869       }
870       getUnknownFields().writeTo(output);
871     }
872     
873     private int memoizedSerializedSize = -1;
874     public int getSerializedSize() {
875       int size = memoizedSerializedSize;
876       if (size != -1) return size;
877     
878       size = 0;
879       if (((bitField0_ & 0x00000001) == 0x00000001)) {
880         size += com.google.protobuf.CodedOutputStream
881           .computeInt32Size(1, infoPort_);
882       }
883       size += getUnknownFields().getSerializedSize();
884       memoizedSerializedSize = size;
885       return size;
886     }
887     
888     private static final long serialVersionUID = 0L;
889     @java.lang.Override
890     protected java.lang.Object writeReplace()
891         throws java.io.ObjectStreamException {
892       return super.writeReplace();
893     }
894     
895     @java.lang.Override
896     public boolean equals(final java.lang.Object obj) {
897       if (obj == this) {
898        return true;
899       }
900       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)) {
901         return super.equals(obj);
902       }
903       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) obj;
904       
905       boolean result = true;
906       result = result && (hasInfoPort() == other.hasInfoPort());
907       if (hasInfoPort()) {
908         result = result && (getInfoPort()
909             == other.getInfoPort());
910       }
911       result = result &&
912           getUnknownFields().equals(other.getUnknownFields());
913       return result;
914     }
915     
916     @java.lang.Override
917     public int hashCode() {
918       int hash = 41;
919       hash = (19 * hash) + getDescriptorForType().hashCode();
920       if (hasInfoPort()) {
921         hash = (37 * hash) + INFOPORT_FIELD_NUMBER;
922         hash = (53 * hash) + getInfoPort();
923       }
924       hash = (29 * hash) + getUnknownFields().hashCode();
925       return hash;
926     }
927     
928     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
929         com.google.protobuf.ByteString data)
930         throws com.google.protobuf.InvalidProtocolBufferException {
931       return newBuilder().mergeFrom(data).buildParsed();
932     }
933     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
934         com.google.protobuf.ByteString data,
935         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
936         throws com.google.protobuf.InvalidProtocolBufferException {
937       return newBuilder().mergeFrom(data, extensionRegistry)
938                .buildParsed();
939     }
940     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(byte[] data)
941         throws com.google.protobuf.InvalidProtocolBufferException {
942       return newBuilder().mergeFrom(data).buildParsed();
943     }
944     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
945         byte[] data,
946         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
947         throws com.google.protobuf.InvalidProtocolBufferException {
948       return newBuilder().mergeFrom(data, extensionRegistry)
949                .buildParsed();
950     }
951     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(java.io.InputStream input)
952         throws java.io.IOException {
953       return newBuilder().mergeFrom(input).buildParsed();
954     }
955     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
956         java.io.InputStream input,
957         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
958         throws java.io.IOException {
959       return newBuilder().mergeFrom(input, extensionRegistry)
960                .buildParsed();
961     }
962     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(java.io.InputStream input)
963         throws java.io.IOException {
964       Builder builder = newBuilder();
965       if (builder.mergeDelimitedFrom(input)) {
966         return builder.buildParsed();
967       } else {
968         return null;
969       }
970     }
971     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseDelimitedFrom(
972         java.io.InputStream input,
973         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
974         throws java.io.IOException {
975       Builder builder = newBuilder();
976       if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
977         return builder.buildParsed();
978       } else {
979         return null;
980       }
981     }
982     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
983         com.google.protobuf.CodedInputStream input)
984         throws java.io.IOException {
985       return newBuilder().mergeFrom(input).buildParsed();
986     }
987     public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo parseFrom(
988         com.google.protobuf.CodedInputStream input,
989         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
990         throws java.io.IOException {
991       return newBuilder().mergeFrom(input, extensionRegistry)
992                .buildParsed();
993     }
994     
995     public static Builder newBuilder() { return Builder.create(); }
996     public Builder newBuilderForType() { return newBuilder(); }
997     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo prototype) {
998       return newBuilder().mergeFrom(prototype);
999     }
1000     public Builder toBuilder() { return newBuilder(this); }
1001     
1002     @java.lang.Override
1003     protected Builder newBuilderForType(
1004         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1005       Builder builder = new Builder(parent);
1006       return builder;
1007     }
1008     public static final class Builder extends
1009         com.google.protobuf.GeneratedMessage.Builder<Builder>
1010        implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfoOrBuilder {
1011       public static final com.google.protobuf.Descriptors.Descriptor
1012           getDescriptor() {
1013         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_descriptor;
1014       }
1015       
1016       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1017           internalGetFieldAccessorTable() {
1018         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionServerInfo_fieldAccessorTable;
1019       }
1020       
1021       // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.newBuilder()
1022       private Builder() {
1023         maybeForceBuilderInitialization();
1024       }
1025       
1026       private Builder(BuilderParent parent) {
1027         super(parent);
1028         maybeForceBuilderInitialization();
1029       }
1030       private void maybeForceBuilderInitialization() {
1031         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1032         }
1033       }
1034       private static Builder create() {
1035         return new Builder();
1036       }
1037       
1038       public Builder clear() {
1039         super.clear();
1040         infoPort_ = 0;
1041         bitField0_ = (bitField0_ & ~0x00000001);
1042         return this;
1043       }
1044       
1045       public Builder clone() {
1046         return create().mergeFrom(buildPartial());
1047       }
1048       
1049       public com.google.protobuf.Descriptors.Descriptor
1050           getDescriptorForType() {
1051         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDescriptor();
1052       }
1053       
1054       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo getDefaultInstanceForType() {
1055         return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance();
1056       }
1057       
1058       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo build() {
1059         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial();
1060         if (!result.isInitialized()) {
1061           throw newUninitializedMessageException(result);
1062         }
1063         return result;
1064       }
1065       
1066       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo buildParsed()
1067           throws com.google.protobuf.InvalidProtocolBufferException {
1068         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = buildPartial();
1069         if (!result.isInitialized()) {
1070           throw newUninitializedMessageException(
1071             result).asInvalidProtocolBufferException();
1072         }
1073         return result;
1074       }
1075       
1076       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo buildPartial() {
1077         org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo(this);
1078         int from_bitField0_ = bitField0_;
1079         int to_bitField0_ = 0;
1080         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1081           to_bitField0_ |= 0x00000001;
1082         }
1083         result.infoPort_ = infoPort_;
1084         result.bitField0_ = to_bitField0_;
1085         onBuilt();
1086         return result;
1087       }
1088       
1089       public Builder mergeFrom(com.google.protobuf.Message other) {
1090         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo) {
1091           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo)other);
1092         } else {
1093           super.mergeFrom(other);
1094           return this;
1095         }
1096       }
1097       
1098       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo other) {
1099         if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.getDefaultInstance()) return this;
1100         if (other.hasInfoPort()) {
1101           setInfoPort(other.getInfoPort());
1102         }
1103         this.mergeUnknownFields(other.getUnknownFields());
1104         return this;
1105       }
1106       
1107       public final boolean isInitialized() {
1108         return true;
1109       }
1110       
1111       public Builder mergeFrom(
1112           com.google.protobuf.CodedInputStream input,
1113           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1114           throws java.io.IOException {
1115         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1116           com.google.protobuf.UnknownFieldSet.newBuilder(
1117             this.getUnknownFields());
1118         while (true) {
1119           int tag = input.readTag();
1120           switch (tag) {
1121             case 0:
1122               this.setUnknownFields(unknownFields.build());
1123               onChanged();
1124               return this;
1125             default: {
1126               if (!parseUnknownField(input, unknownFields,
1127                                      extensionRegistry, tag)) {
1128                 this.setUnknownFields(unknownFields.build());
1129                 onChanged();
1130                 return this;
1131               }
1132               break;
1133             }
1134             case 8: {
1135               bitField0_ |= 0x00000001;
1136               infoPort_ = input.readInt32();
1137               break;
1138             }
1139           }
1140         }
1141       }
1142       
1143       private int bitField0_;
1144       
1145       // optional int32 infoPort = 1;
1146       private int infoPort_ ;
1147       public boolean hasInfoPort() {
1148         return ((bitField0_ & 0x00000001) == 0x00000001);
1149       }
1150       public int getInfoPort() {
1151         return infoPort_;
1152       }
1153       public Builder setInfoPort(int value) {
1154         bitField0_ |= 0x00000001;
1155         infoPort_ = value;
1156         onChanged();
1157         return this;
1158       }
1159       public Builder clearInfoPort() {
1160         bitField0_ = (bitField0_ & ~0x00000001);
1161         infoPort_ = 0;
1162         onChanged();
1163         return this;
1164       }
1165       
1166       // @@protoc_insertion_point(builder_scope:RegionServerInfo)
1167     }
1168     
1169     static {
1170       defaultInstance = new RegionServerInfo(true);
1171       defaultInstance.initFields();
1172     }
1173     
1174     // @@protoc_insertion_point(class_scope:RegionServerInfo)
1175   }
1176   
1177   private static com.google.protobuf.Descriptors.Descriptor
1178     internal_static_SnapshotDescription_descriptor;
         // Per-message descriptor / reflective field-accessor-table pairs.
         // All are assigned exactly once by this class's static initializer.
1179   private static
1180     com.google.protobuf.GeneratedMessage.FieldAccessorTable
1181       internal_static_SnapshotDescription_fieldAccessorTable;
1182   private static com.google.protobuf.Descriptors.Descriptor
1183     internal_static_RegionServerInfo_descriptor;
1184   private static
1185     com.google.protobuf.GeneratedMessage.FieldAccessorTable
1186       internal_static_RegionServerInfo_fieldAccessorTable;
1187   
1188   public static com.google.protobuf.Descriptors.FileDescriptor
1189       getDescriptor() {
         // FileDescriptor for hbase.proto, built once in the static
         // initializer from the embedded serialized descriptor data.
1190     return descriptor;
1191   }
1192   private static com.google.protobuf.Descriptors.FileDescriptor
1193       descriptor;
1194   static {
         // descriptorData holds the serialized FileDescriptorProto for
         // hbase.proto, split across string constants by protoc. Do not
         // edit by hand; regenerate from the .proto file instead.
1195     java.lang.String[] descriptorData = {
1196       "\n\013hbase.proto\"\255\001\n\023SnapshotDescription\022\014\n" +
1197       "\004name\030\001 \002(\t\022\r\n\005table\030\002 \001(\t\022\027\n\014creationTi" +
1198       "me\030\003 \001(\003:\0010\022.\n\004type\030\004 \001(\0162\031.SnapshotDesc" +
1199       "ription.Type:\005FLUSH\022\017\n\007version\030\005 \001(\005\"\037\n\004" +
1200       "Type\022\014\n\010DISABLED\020\000\022\t\n\005FLUSH\020\001\"$\n\020RegionS" +
1201       "erverInfo\022\020\n\010infoPort\030\001 \001(\005B>\n*org.apach" +
1202       "e.hadoop.hbase.protobuf.generatedB\013HBase" +
1203       "ProtosH\001\240\001\001"
1204     };
         // The assigner runs once the FileDescriptor is built, wiring up
         // each message's descriptor (by declaration index) and its
         // reflective field accessor table.
1205     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
1206       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
1207         public com.google.protobuf.ExtensionRegistry assignDescriptors(
1208             com.google.protobuf.Descriptors.FileDescriptor root) {
1209           descriptor = root;
1210           internal_static_SnapshotDescription_descriptor =
1211             getDescriptor().getMessageTypes().get(0);
1212           internal_static_SnapshotDescription_fieldAccessorTable = new
1213             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1214               internal_static_SnapshotDescription_descriptor,
1215               new java.lang.String[] { "Name", "Table", "CreationTime", "Type", "Version", },
1216               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class,
1217               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class);
1218           internal_static_RegionServerInfo_descriptor =
1219             getDescriptor().getMessageTypes().get(1);
1220           internal_static_RegionServerInfo_fieldAccessorTable = new
1221             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
1222               internal_static_RegionServerInfo_descriptor,
1223               new java.lang.String[] { "InfoPort", },
1224               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.class,
1225               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionServerInfo.Builder.class);
1226           return null;
1227         }
1228       };
         // Empty dependency array: hbase.proto imports no other .proto files.
1229     com.google.protobuf.Descriptors.FileDescriptor
1230       .internalBuildGeneratedFileFrom(descriptorData,
1231         new com.google.protobuf.Descriptors.FileDescriptor[] {
1232         }, assigner);
1233   }
1234   
1235   // @@protoc_insertion_point(outer_class_scope)
1236 }