// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: TableListMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class TableListMessage {
  private TableListMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
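  /**
   * Message with a single repeated string field {@code name} (field number 1);
   * in the HBase REST gateway it carries a list of table names.
   *
   * Usage sketch (the table name below is a hypothetical example):
   * <pre>
   *   TableList list = TableList.newBuilder().addName("usertable").build();
   *   byte[] bytes = list.toByteArray();
   *   TableList parsed = TableList.parseFrom(bytes);
   * </pre>
   */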
  public static final class TableList extends
      com.google.protobuf.GeneratedMessage {

    private TableList() {
      initFields();
    }
    private TableList(boolean noInit) {}

    private static final TableList defaultInstance;
    public static TableList getDefaultInstance() {
      return defaultInstance;
    }

    public TableList getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
    }

    public static final int NAME_FIELD_NUMBER = 1;
    private java.util.List<java.lang.String> name_ =
      java.util.Collections.emptyList();
    public java.util.List<java.lang.String> getNameList() {
      return name_;
    }
    public int getNameCount() { return name_.size(); }
    public java.lang.String getName(int index) {
      return name_.get(index);
    }

    private void initFields() {
    }
    public final boolean isInitialized() {
      return true;
    }

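    // Writes each name as a length-delimited string with field number 1,
    // followed by any unknown fields carried over from parsing.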
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (java.lang.String element : getNameList()) {
        output.writeString(1, element);
      }
      getUnknownFields().writeTo(output);
    }

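    // Computes the serialized size once and memoizes it: one tag byte per
    // name plus each name's length-delimited payload, plus unknown fields.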
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (java.lang.String element : getNameList()) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeStringSizeNoTag(element);
        }
        size += dataSize;
        size += 1 * getNameList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

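    // Static parse helpers: each overload merges the serialized input into a
    // fresh Builder and returns the built message.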
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

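    // Builder mutates a single TableList instance and hands it off in
    // buildPartial(); the Builder cannot be reused after build().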
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder> {
      private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result;

      private Builder() {}

      private static Builder create() {
        Builder builder = new Builder();
        builder.result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList();
        return builder;
      }

      protected org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList internalGetResult() {
        return result;
      }

      public Builder clear() {
        if (result == null) {
          throw new IllegalStateException(
            "Cannot call clear() after build().");
        }
        result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(result);
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDescriptor();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance();
      }

      public boolean isInitialized() {
        return result.isInitialized();
      }
      public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList build() {
        if (result != null && !isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return buildPartial();
      }

      private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        if (!isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return buildPartial();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildPartial() {
        if (result == null) {
          throw new IllegalStateException(
            "build() has already been called on this Builder.");
        }
        if (result.name_ != java.util.Collections.EMPTY_LIST) {
          result.name_ =
            java.util.Collections.unmodifiableList(result.name_);
        }
        org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList returnMe = result;
        result = null;
        return returnMe;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) {
          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList other) {
        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance()) return this;
        if (!other.name_.isEmpty()) {
          if (result.name_.isEmpty()) {
            result.name_ = new java.util.ArrayList<java.lang.String>();
          }
          result.name_.addAll(other.name_);
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

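      // Reads tag/value pairs until end of input (tag 0); tag 10 (field 1,
      // length-delimited) strings are added via addName, while any other
      // field is preserved in the unknown-field set.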
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                return this;
              }
              break;
            }
            case 10: {
              addName(input.readString());
              break;
            }
          }
        }
      }

      public java.util.List<java.lang.String> getNameList() {
        return java.util.Collections.unmodifiableList(result.name_);
      }
      public int getNameCount() {
        return result.getNameCount();
      }
      public java.lang.String getName(int index) {
        return result.getName(index);
      }
      public Builder setName(int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        result.name_.set(index, value);
        return this;
      }
      public Builder addName(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        if (result.name_.isEmpty()) {
          result.name_ = new java.util.ArrayList<java.lang.String>();
        }
        result.name_.add(value);
        return this;
      }
      public Builder addAllName(
          java.lang.Iterable<? extends java.lang.String> values) {
        if (result.name_.isEmpty()) {
          result.name_ = new java.util.ArrayList<java.lang.String>();
        }
        super.addAll(values, result.name_);
        return this;
      }
      public Builder clearName() {
        result.name_ = java.util.Collections.emptyList();
        return this;
      }
    }

    static {
      defaultInstance = new TableList(true);
      org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internalForceInit();
      defaultInstance.initFields();
    }
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
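  // Builds the FileDescriptor at class-load time from the embedded serialized
  // FileDescriptorProto of TableListMessage.proto, then wires up the TableList
  // message descriptor and its reflective field-accessor table.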
  static {
    java.lang.String[] descriptorData = {
      "\n\026TableListMessage.proto\022/org.apache.had" +
      "oop.hbase.rest.protobuf.generated\"\031\n\tTab" +
      "leList\022\014\n\004name\030\001 \003(\t"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
              new java.lang.String[] { "Name", },
              org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  public static void internalForceInit() {}
}