package org.apache.hadoop.hbase.avro.generated;

@SuppressWarnings("all")
public class ATableDescriptor extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
  public static final org.apache.avro.Schema SCHEMA$ = org.apache.avro.Schema.parse("{\"type\":\"record\",\"name\":\"ATableDescriptor\",\"namespace\":\"org.apache.hadoop.hbase.avro.generated\",\"fields\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"families\",\"type\":[{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"AFamilyDescriptor\",\"fields\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"compression\",\"type\":[{\"type\":\"enum\",\"name\":\"ACompressionAlgorithm\",\"symbols\":[\"LZO\",\"GZ\",\"NONE\"]},\"null\"]},{\"name\":\"maxVersions\",\"type\":[\"int\",\"null\"]},{\"name\":\"blocksize\",\"type\":[\"int\",\"null\"]},{\"name\":\"inMemory\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"timeToLive\",\"type\":[\"int\",\"null\"]},{\"name\":\"blockCacheEnabled\",\"type\":[\"boolean\",\"null\"]}]}},\"null\"]},{\"name\":\"maxFileSize\",\"type\":[\"long\",\"null\"]},{\"name\":\"memStoreFlushSize\",\"type\":[\"long\",\"null\"]},{\"name\":\"rootRegion\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"metaRegion\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"metaTable\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"readOnly\",\"type\":[\"boolean\",\"null\"]},{\"name\":\"deferredLogFlush\",\"type\":[\"boolean\",\"null\"]}]}");
  public java.nio.ByteBuffer name;
  public org.apache.avro.generic.GenericArray<org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor> families;
  public java.lang.Long maxFileSize;
  public java.lang.Long memStoreFlushSize;
  public java.lang.Boolean rootRegion;
  public java.lang.Boolean metaRegion;
  public java.lang.Boolean metaTable;
  public java.lang.Boolean readOnly;
  public java.lang.Boolean deferredLogFlush;
  public org.apache.avro.Schema getSchema() { return SCHEMA$; }
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return name;
    case 1: return families;
    case 2: return maxFileSize;
    case 3: return memStoreFlushSize;
    case 4: return rootRegion;
    case 5: return metaRegion;
    case 6: return metaTable;
    case 7: return readOnly;
    case 8: return deferredLogFlush;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0: name = (java.nio.ByteBuffer)value$; break;
    case 1: families = (org.apache.avro.generic.GenericArray<org.apache.hadoop.hbase.avro.generated.AFamilyDescriptor>)value$; break;
    case 2: maxFileSize = (java.lang.Long)value$; break;
    case 3: memStoreFlushSize = (java.lang.Long)value$; break;
    case 4: rootRegion = (java.lang.Boolean)value$; break;
    case 5: metaRegion = (java.lang.Boolean)value$; break;
    case 6: metaTable = (java.lang.Boolean)value$; break;
    case 7: readOnly = (java.lang.Boolean)value$; break;
    case 8: deferredLogFlush = (java.lang.Boolean)value$; break;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }
}