Serialized Form
Package org.apache.hadoop.hive.hwi |
serialVersionUID: 1L
Package org.apache.hadoop.hive.metastore |
Package org.apache.hadoop.hive.metastore.api |
message
String message
__isset
org.apache.hadoop.hive.metastore.api.AlreadyExistsException.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.ConfigValSecurityException.Isset __isset
name
String name
description
String description
locationUri
String locationUri
__isset
org.apache.hadoop.hive.metastore.api.Database.Isset __isset
name
String name
type
String type
comment
String comment
__isset
org.apache.hadoop.hive.metastore.api.FieldSchema.Isset __isset
indexName
String indexName
indexType
int indexType
tableName
String tableName
dbName
String dbName
colNames
List<E> colNames
partName
String partName
__isset
org.apache.hadoop.hive.metastore.api.Index.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.IndexAlreadyExistsException.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.InvalidObjectException.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.InvalidOperationException.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.MetaException.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.NoSuchObjectException.Isset __isset
col
String col
order
int order
__isset
org.apache.hadoop.hive.metastore.api.Order.Isset __isset
values
List<E> values
dbName
String dbName
tableName
String tableName
createTime
int createTime
lastAccessTime
int lastAccessTime
sd
StorageDescriptor sd
parameters
Map<K,V> parameters
__isset
org.apache.hadoop.hive.metastore.api.Partition.Isset __isset
fieldSchemas
List<E> fieldSchemas
properties
Map<K,V> properties
__isset
org.apache.hadoop.hive.metastore.api.Schema.Isset __isset
name
String name
serializationLib
String serializationLib
parameters
Map<K,V> parameters
__isset
org.apache.hadoop.hive.metastore.api.SerDeInfo.Isset __isset
cols
List<E> cols
location
String location
inputFormat
String inputFormat
outputFormat
String outputFormat
compressed
boolean compressed
numBuckets
int numBuckets
serdeInfo
SerDeInfo serdeInfo
bucketCols
List<E> bucketCols
sortCols
List<E> sortCols
parameters
Map<K,V> parameters
__isset
org.apache.hadoop.hive.metastore.api.StorageDescriptor.Isset __isset
tableName
String tableName
dbName
String dbName
owner
String owner
createTime
int createTime
lastAccessTime
int lastAccessTime
retention
int retention
sd
StorageDescriptor sd
partitionKeys
List<E> partitionKeys
parameters
Map<K,V> parameters
viewOriginalText
String viewOriginalText
viewExpandedText
String viewExpandedText
tableType
String tableType
__isset
org.apache.hadoop.hive.metastore.api.Table.Isset __isset
new_part
Partition new_part
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.add_partition_args.Isset __isset
success
Partition success
o1
InvalidObjectException o1
o2
AlreadyExistsException o2
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.add_partition_result.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
new_part
Partition new_part
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.alter_partition_args.Isset __isset
o1
InvalidOperationException o1
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.alter_partition_result.Isset __isset
dbname
String dbname
tbl_name
String tbl_name
new_tbl
Table new_tbl
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.alter_table_args.Isset __isset
o1
InvalidOperationException o1
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.alter_table_result.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_vals
List<E> part_vals
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.append_partition_args.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_name
String part_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.append_partition_by_name_args.Isset __isset
success
Partition success
o1
InvalidObjectException o1
o2
AlreadyExistsException o2
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.append_partition_by_name_result.Isset __isset
success
Partition success
o1
InvalidObjectException o1
o2
AlreadyExistsException o2
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.append_partition_result.Isset __isset
database
Database database
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.create_database_args.Isset __isset
o1
AlreadyExistsException o1
o2
InvalidObjectException o2
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.create_database_result.Isset __isset
tbl
Table tbl
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.create_table_args.Isset __isset
o1
AlreadyExistsException o1
o2
InvalidObjectException o2
o3
MetaException o3
o4
NoSuchObjectException o4
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.create_table_result.Isset __isset
type
Type type
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.create_type_args.Isset __isset
success
boolean success
o1
AlreadyExistsException o1
o2
InvalidObjectException o2
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.create_type_result.Isset __isset
name
String name
deleteData
boolean deleteData
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_database_args.Isset __isset
o1
NoSuchObjectException o1
o2
InvalidOperationException o2
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_database_result.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_vals
List<E> part_vals
deleteData
boolean deleteData
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_partition_args.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_name
String part_name
deleteData
boolean deleteData
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_partition_by_name_args.Isset __isset
success
boolean success
o1
NoSuchObjectException o1
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_partition_by_name_result.Isset __isset
success
boolean success
o1
NoSuchObjectException o1
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_partition_result.Isset __isset
dbname
String dbname
name
String name
deleteData
boolean deleteData
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_table_args.Isset __isset
o1
NoSuchObjectException o1
o3
MetaException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_table_result.Isset __isset
type
String type
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_type_args.Isset __isset
success
boolean success
o1
MetaException o1
o2
NoSuchObjectException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_type_result.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_all_databases_result.Isset __isset
db_name
String db_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_all_tables_args.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_all_tables_result.Isset __isset
name
String name
defaultValue
String defaultValue
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_config_value_args.Isset __isset
success
String success
o1
ConfigValSecurityException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_config_value_result.Isset __isset
name
String name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_database_args.Isset __isset
success
Database success
o1
NoSuchObjectException o1
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_database_result.Isset __isset
pattern
String pattern
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_databases_args.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_databases_result.Isset __isset
db_name
String db_name
table_name
String table_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_fields_args.Isset __isset
success
List<E> success
o1
MetaException o1
o2
UnknownTableException o2
o3
UnknownDBException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_fields_result.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_vals
List<E> part_vals
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_args.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_name
String part_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_by_name_args.Isset __isset
success
Partition success
o1
MetaException o1
o2
NoSuchObjectException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_by_name_result.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
max_parts
short max_parts
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_names_args.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_vals
List<E> part_vals
max_parts
short max_parts
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_names_ps_args.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_names_ps_result.Isset __isset
success
List<E> success
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_names_result.Isset __isset
success
Partition success
o1
MetaException o1
o2
NoSuchObjectException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partition_result.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
max_parts
short max_parts
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partitions_args.Isset __isset
db_name
String db_name
tbl_name
String tbl_name
part_vals
List<E> part_vals
max_parts
short max_parts
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partitions_ps_args.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partitions_ps_result.Isset __isset
success
List<E> success
o1
NoSuchObjectException o1
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_partitions_result.Isset __isset
db_name
String db_name
table_name
String table_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_schema_args.Isset __isset
success
List<E> success
o1
MetaException o1
o2
UnknownTableException o2
o3
UnknownDBException o3
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_schema_result.Isset __isset
dbname
String dbname
tbl_name
String tbl_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_table_args.Isset __isset
success
Table success
o1
MetaException o1
o2
NoSuchObjectException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_table_result.Isset __isset
db_name
String db_name
pattern
String pattern
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_tables_args.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_tables_result.Isset __isset
name
String name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_type_all_args.Isset __isset
success
Map<K,V> success
o2
MetaException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_type_all_result.Isset __isset
name
String name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_type_args.Isset __isset
success
Type success
o1
MetaException o1
o2
NoSuchObjectException o2
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.get_type_result.Isset __isset
part_name
String part_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.partition_name_to_spec_args.Isset __isset
success
Map<K,V> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.partition_name_to_spec_result.Isset __isset
part_name
String part_name
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.partition_name_to_vals_args.Isset __isset
success
List<E> success
o1
MetaException o1
__isset
org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.partition_name_to_vals_result.Isset __isset
name
String name
type1
String type1
type2
String type2
fields
List<E> fields
__isset
org.apache.hadoop.hive.metastore.api.Type.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.UnknownDBException.Isset __isset
message
String message
__isset
org.apache.hadoop.hive.metastore.api.UnknownTableException.Isset __isset
version
String version
comments
String comments
__isset
org.apache.hadoop.hive.metastore.api.Version.Isset __isset
Package org.apache.hadoop.hive.ql |
serialVersionUID: 1L
queryString
String queryString
rootTasks
ArrayList<E> rootTasks
fetchTask
FetchTask fetchTask
inputs
HashSet<E> inputs
outputs
HashSet<E> outputs
- Note: outputs are not all determined at compile time.
Some of the tasks can change the outputs at run time, because only at run
time, we know what are the changes. These tasks should keep a reference
to the outputs here.
linfo
LineageInfo linfo
- Lineage information for the query.
idToTableNameMap
HashMap<K,V> idToTableNameMap
queryId
String queryId
query
org.apache.hadoop.hive.ql.plan.api.Query query
counters
HashMap<K,V> counters
done
HashSet<E> done
started
HashSet<E> started
Package org.apache.hadoop.hive.ql.exec |
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
firstRow
boolean firstRow
serialVersionUID: 1L
internalName
String internalName
alias
String alias
tabAlias
String tabAlias
- Store the alias of the table where available.
isPartitionCol
boolean isPartitionCol
- Indicates whether the column is a partition column.
serialVersionUID: 1L
storage
HashMap<K,V> storage
joinEmitInterval
int joinEmitInterval
joinCacheSize
int joinCacheSize
nextSz
int nextSz
hconf
org.apache.hadoop.conf.Configuration hconf
serialVersionUID: 1L
listTasks
List<E> listTasks
resolved
boolean resolved
resTasks
List<E> resTasks
resolver
ConditionalResolver resolver
resolverCtx
Object resolverCtx
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
dependeciesTaskSet
Set<E> dependeciesTaskSet
Class org.apache.hadoop.hive.ql.exec.ExprNodeIndexEvaluator extends Object implements Serializable |
serialVersionUID: 1L
isNativeTable
boolean isNativeTable
work
FetchWork work
splitNum
int splitNum
currPart
PartitionDesc currPart
currTbl
TableDesc currTbl
tblDataDone
boolean tblDataDone
serialVersionUID: 1L
maxRows
int maxRows
ftOp
FetchOperator ftOp
mSerde
LazySimpleSerDe mSerde
totalRows
int totalRows
serialVersionUID: 1L
outputClass
Class<T> outputClass
taskId
String taskId
filesCreated
boolean filesCreated
recordValue
org.apache.hadoop.io.Writable recordValue
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
keyProber
org.apache.hadoop.hive.ql.exec.GroupByOperator.KeyWrapper keyProber
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
acc
ArrayList<E> acc
selectObjs
ArrayList<E> selectObjs
serialVersionUID: 1L
Class org.apache.hadoop.hive.ql.exec.MapJoinObject extends Object implements Serializable |
serialVersionUID: 1L
serialVersionUID: 1L
opCtxMap
Map<K,V> opCtxMap
operatorToPaths
Map<K,V> operatorToPaths
childrenPaths
ArrayList<E> childrenPaths
extraChildrenToClose
ArrayList<E> extraChildrenToClose
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
childOperators
List<E> childOperators
parentOperators
List<E> parentOperators
operatorId
String operatorId
counterNames
ArrayList<E> counterNames
- List of counter names associated with the operator. It contains the
following default counters NUM_INPUT_ROWS NUM_OUTPUT_ROWS TIME_TAKEN
Individual operators can add to this list via addToCounterNames methods.
counterNameToEnum
HashMap<K,V> counterNameToEnum
- Each operator has its own map of its counter names to disjoint
ProgressCounter - it is populated at compile time and is read in at
run-time while extracting the operator specific counts.
conf
Serializable conf
done
boolean done
serialVersionUID: 1L
firstRow
boolean firstRow
serialVersionUID: 1L
signature
ArrayList<E> signature
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
localWork
MapredLocalWork localWork
fetchOperators
Map<K,V> fetchOperators
nextGroupStorage
HashMap<K,V> nextGroupStorage
candidateStorage
HashMap<K,V> candidateStorage
serialVersionUID: 1L
neededColumnIDs
ArrayList<E> neededColumnIDs
serialVersionUID: 1L
childTasks
List<E> childTasks
parentTasks
List<E> parentTasks
id
String id
work
Serializable work
serialVersionUID: 1L
serialVersionUID: 1L
Class org.apache.hadoop.hive.ql.exec.TypedBytesRecordReader extends Object implements Serializable |
funcClass
Class<T> funcClass
- The UDF or UDAF class that has the ambiguity.
argTypeInfos
List<E> argTypeInfos
- The list of parameter types.
methods
List<E> methods
- The list of matched methods.
argumentId
int argumentId
serialVersionUID: 1L
LOG
org.apache.commons.logging.Log LOG
udtfInputOIs
ObjectInspector[] udtfInputOIs
objToSendToUDTF
Object[] objToSendToUDTF
forwardObj
Object[] forwardObj
serialVersionUID: 1L
parentObjInspectors
StructObjectInspector[] parentObjInspectors
parentFields
List<E>[] parentFields
columnTypeResolvers
GenericUDFUtils.ReturnObjectInspectorResolver[] columnTypeResolvers
needsTransform
boolean[] needsTransform
outputRow
ArrayList<E> outputRow
Package org.apache.hadoop.hive.ql.exec.persistence |
readExternal
public void readExternal(ObjectInput in)
throws IOException,
ClassNotFoundException
- Throws:
IOException
ClassNotFoundException
writeExternal
public void writeExternal(ObjectOutput out)
throws IOException
- Throws:
IOException
readExternal
public void readExternal(ObjectInput in)
throws IOException,
ClassNotFoundException
- Throws:
IOException
ClassNotFoundException
writeExternal
public void writeExternal(ObjectOutput out)
throws IOException
- Throws:
IOException
Package org.apache.hadoop.hive.ql.hooks |
serialVersionUID: 1L
index
Map<K,V> index
- The map contains an index from the (datacontainer, columnname) to the
dependency vector for that tuple. This is used to generate the
dependency vectors during the walk of the operator tree.
serialVersionUID: 1L
tabAlias
LineageInfo.TableAliasInfo tabAlias
- The table and alias info encapsulated in a different class.
column
FieldSchema column
- The metastore column information. The column can be null,
and that denotes that the expression is dependent on the row
of the table and not on a particular column. This can happen in the case
of count(1).
serialVersionUID: 1L
tab
Table tab
- The table in case this container is a table.
part
Partition part
- The partition in case this container is a partition.
serialVersionUID: 1L
type
LineageInfo.DependencyType type
- The type of dependency.
expr
String expr
- Expression string for the dependency.
baseCols
List<E> baseCols
- The list of base columns that the particular column depends on.
serialVersionUID: 1L
dc
LineageInfo.DataContainer dc
- The data container for this key.
fld
FieldSchema fld
- The field schema for this key.
serialVersionUID: 1L
alias
String alias
- The alias for the table.
table
Table table
- The metastore table information.
serialVersionUID: 1L
t
Table t
- The table.
p
Partition p
- The partition. This is null for a non partitioned table.
name
String name
- This is derived from t and p, but we need to serialize this field to make sure
ReadEntity.hashCode() does not need to recursively read into t and p.
serialVersionUID: 1L
typ
WriteEntity.Type typ
- The type.
t
Table t
- The table. This is null if this is a directory.
p
Partition p
- The partition. This is null if this object is not a partition.
d
String d
- The directory if this is a directory.
name
String name
- This is derived from t and p, but we need to serialize this field to make sure
WriteEntity.hashCode() does not need to recursively read into t and p.
Package org.apache.hadoop.hive.ql.metadata |
tableName
String tableName
table
Table table
tPartition
Partition tPartition
deserializer
Deserializer deserializer
- These fields are cached. The information comes from tPartition.
outputFormatClass
Class<T> outputFormatClass
inputFormatClass
Class<T> inputFormatClass
uri
URI uri
serialVersionUID: 1L
tTable
Table tTable
deserializer
Deserializer deserializer
- These fields are all cached fields. The information comes from tTable.
outputFormatClass
Class<T> outputFormatClass
inputFormatClass
Class<T> inputFormatClass
uri
URI uri
storageHandler
HiveStorageHandler storageHandler
Package org.apache.hadoop.hive.ql.parse |
serialVersionUID: 1L
errors
ArrayList<E> errors
Class org.apache.hadoop.hive.ql.parse.SamplePruner extends Object implements Serializable |
serialVersionUID: 1L
Package org.apache.hadoop.hive.ql.plan |
serialVersionUID: 1L
tableName
String tableName
dbName
String dbName
location
String location
ifNotExists
boolean ifNotExists
partSpec
LinkedHashMap<K,V> partSpec
serialVersionUID: 1L
genericUDAFName
String genericUDAFName
genericUDAFEvaluator
GenericUDAFEvaluator genericUDAFEvaluator
- In case genericUDAFEvaluator is Serializable, we will serialize the object.
In case genericUDAFEvaluator does not implement Serializable, Java will
remember the class of genericUDAFEvaluator and creates a new instance when
deserialized. This is exactly what we want.
parameters
ArrayList<E> parameters
distinct
boolean distinct
mode
GenericUDAFEvaluator.Mode mode
serialVersionUID: 1L
op
AlterTableDesc.AlterTableTypes op
oldName
String oldName
newName
String newName
newCols
ArrayList<E> newCols
serdeName
String serdeName
props
HashMap<K,V> props
inputFormat
String inputFormat
outputFormat
String outputFormat
storageHandler
String storageHandler
numberBuckets
int numberBuckets
bucketColumns
ArrayList<E> bucketColumns
sortColumns
ArrayList<E> sortColumns
oldColName
String oldColName
newColName
String newColName
newColType
String newColType
newColComment
String newColComment
first
boolean first
afterCol
String afterCol
expectView
boolean expectView
tableName
String tableName
dbName
String dbName
partSpec
LinkedHashMap<K,V> partSpec
type
AlterTableDesc.AlterTableTypes type
serialVersionUID: 1L
tableName
String tableName
dbName
String dbName
partSpec
LinkedHashMap<K,V> partSpec
type
ArchiveWork.ArchiveActionType type
serialVersionUID: 1L
bufferSize
Integer bufferSize
serialVersionUID: 1L
serialVersionUID: 1L
listTasks
List<E> listTasks
dir
String dir
dpCtx
DynamicPartitionCtx dpCtx
serialVersionUID: 1L
serialVersionUID: 1L
dirToTaskMap
HashMap<K,V> dirToTaskMap
serialVersionUID: 1L
listWorks
List<E> listWorks
serialVersionUID: 1L
fromPath
String fromPath
toPath
String toPath
serialVersionUID: 1L
databaseName
String databaseName
locationUri
String locationUri
comment
String comment
ifNotExists
boolean ifNotExists
serialVersionUID: 1L
functionName
String functionName
className
String className
serialVersionUID: 1L
tableName
String tableName
isExternal
boolean isExternal
cols
ArrayList<E> cols
partCols
ArrayList<E> partCols
bucketCols
ArrayList<E> bucketCols
sortCols
ArrayList<E> sortCols
numBuckets
int numBuckets
fieldDelim
String fieldDelim
fieldEscape
String fieldEscape
collItemDelim
String collItemDelim
mapKeyDelim
String mapKeyDelim
lineDelim
String lineDelim
comment
String comment
inputFormat
String inputFormat
outputFormat
String outputFormat
location
String location
serName
String serName
storageHandler
String storageHandler
serdeProps
Map<K,V> serdeProps
tblProps
Map<K,V> tblProps
ifNotExists
boolean ifNotExists
serialVersionUID: 1L
tableName
String tableName
isExternal
boolean isExternal
location
String location
ifNotExists
boolean ifNotExists
likeTableName
String likeTableName
serialVersionUID: 1L
viewName
String viewName
originalText
String originalText
expandedText
String expandedText
schema
List<E> schema
tblProps
Map<K,V> tblProps
comment
String comment
ifNotExists
boolean ifNotExists
serialVersionUID: 1L
serialVersionUID: 1L
createDatabaseDesc
CreateDatabaseDesc createDatabaseDesc
switchDatabaseDesc
SwitchDatabaseDesc switchDatabaseDesc
dropDatabaseDesc
DropDatabaseDesc dropDatabaseDesc
createTblDesc
CreateTableDesc createTblDesc
createTblLikeDesc
CreateTableLikeDesc createTblLikeDesc
createVwDesc
CreateViewDesc createVwDesc
dropTblDesc
DropTableDesc dropTblDesc
alterTblDesc
AlterTableDesc alterTblDesc
showDatabasesDesc
ShowDatabasesDesc showDatabasesDesc
showTblsDesc
ShowTablesDesc showTblsDesc
showFuncsDesc
ShowFunctionsDesc showFuncsDesc
descFunctionDesc
DescFunctionDesc descFunctionDesc
showPartsDesc
ShowPartitionsDesc showPartsDesc
descTblDesc
DescTableDesc descTblDesc
addPartitionDesc
AddPartitionDesc addPartitionDesc
alterTblSimpleDesc
AlterTableSimpleDesc alterTblSimpleDesc
msckDesc
MsckDesc msckDesc
showTblStatusDesc
ShowTableStatusDesc showTblStatusDesc
inputs
HashSet<E> inputs
- ReadEntitites that are passed to the hooks.
outputs
HashSet<E> outputs
- List of WriteEntities that are passed to the hooks.
serialVersionUID: 1L
name
String name
resFile
String resFile
isExtended
boolean isExtended
serialVersionUID: 1L
tableName
String tableName
partSpec
HashMap<K,V> partSpec
resFile
String resFile
isExt
boolean isExt
serialVersionUID: 1L
databaseName
String databaseName
ifExists
boolean ifExists
serialVersionUID: 1L
functionName
String functionName
serialVersionUID: 1L
tableName
String tableName
partSpecs
ArrayList<E> partSpecs
expectView
boolean expectView
serialVersionUID: 1L
partSpec
Map<K,V> partSpec
numDPCols
int numDPCols
numSPCols
int numSPCols
spPath
String spPath
rootPath
String rootPath
numBuckets
int numBuckets
inputToDPCols
Map<K,V> inputToDPCols
spNames
List<E> spNames
dpNames
List<E> dpNames
defaultPartName
String defaultPartName
maxPartsPerNode
int maxPartsPerNode
serialVersionUID: 1L
resFile
String resFile
rootTasks
ArrayList<E> rootTasks
astStringTree
String astStringTree
extended
boolean extended
serialVersionUID: 1L
fieldName
String fieldName
position
int position
serialVersionUID: 1L
column
String column
- The column name.
tabAlias
String tabAlias
- The alias of the table.
isPartitionCol
boolean isPartitionCol
- Is the column a partitioned column.
serialVersionUID: 1L
value
Object value
serialVersionUID: 1L
typeInfo
TypeInfo typeInfo
serialVersionUID: 1L
desc
ExprNodeDesc desc
fieldName
String fieldName
isList
Boolean isList
serialVersionUID: 1L
genericUDF
GenericUDF genericUDF
- In case genericUDF is Serializable, we will serialize the object.
In case genericUDF does not implement Serializable, Java will remember the
class of genericUDF and creates a new instance when deserialized. This is
exactly what we want.
childExprs
List<E> childExprs
serialVersionUID: 1L
serialVersionUID: 1L
col
ExprNodeDesc col
serialVersionUID: 1L
tblDir
String tblDir
tblDesc
TableDesc tblDesc
partDir
ArrayList<E> partDir
partDesc
ArrayList<E> partDesc
limit
int limit
serializationNullFormat
String serializationNullFormat
- Serialization Null Format for the serde used to fetch data.
serialVersionUID: 1L
dirName
String dirName
tableInfo
TableDesc tableInfo
compressed
boolean compressed
destTableId
int destTableId
compressCodec
String compressCodec
compressType
String compressType
multiFileSpray
boolean multiFileSpray
totalFiles
int totalFiles
partitionCols
ArrayList<E> partitionCols
numFiles
int numFiles
dpCtx
DynamicPartitionCtx dpCtx
serialVersionUID: 1L
predicate
ExprNodeDesc predicate
isSamplingPred
boolean isSamplingPred
serialVersionUID: 1L
serialVersionUID: 1L
createFunctionDesc
CreateFunctionDesc createFunctionDesc
dropFunctionDesc
DropFunctionDesc dropFunctionDesc
serialVersionUID: 1L
mode
GroupByDesc.Mode mode
groupKeyNotReductionKey
boolean groupKeyNotReductionKey
bucketGroup
boolean bucketGroup
keys
ArrayList<E> keys
aggregators
ArrayList<E> aggregators
outputColumnNames
ArrayList<E> outputColumnNames
serialVersionUID: 1L
left
int left
right
int right
type
int type
preserved
boolean preserved
serialVersionUID: 1L
handleSkewJoin
boolean handleSkewJoin
skewKeyDefinition
int skewKeyDefinition
bigKeysDirMap
Map<K,V> bigKeysDirMap
smallKeysDirMap
Map<K,V> smallKeysDirMap
skewKeysValuesTables
Map<K,V> skewKeysValuesTables
exprs
Map<K,V> exprs
outputColumnNames
List<E> outputColumnNames
noOuterJoin
boolean noOuterJoin
conds
JoinCondDesc[] conds
tagOrder
Byte[] tagOrder
keyTableDesc
TableDesc keyTableDesc
serialVersionUID: 1L
serialVersionUID: 1L
outputInternalColNames
ArrayList<E> outputInternalColNames
serialVersionUID: 1L
limit
int limit
serialVersionUID: 1L
sourceDir
String sourceDir
serialVersionUID: 1L
targetDir
String targetDir
isDfsDir
boolean isDfsDir
columns
String columns
columnTypes
String columnTypes
serialVersionUID: 1L
replace
boolean replace
tmpDir
String tmpDir
dpCtx
DynamicPartitionCtx dpCtx
table
TableDesc table
partitionSpec
Map<K,V> partitionSpec
serialVersionUID: 1L
keys
Map<K,V> keys
keyTblDesc
TableDesc keyTblDesc
valueTblDescs
List<E> valueTblDescs
posBigTable
int posBigTable
retainList
Map<K,V> retainList
aliasBucketFileNameMapping
LinkedHashMap<K,V> aliasBucketFileNameMapping
bucketFileNameMapping
LinkedHashMap<K,V> bucketFileNameMapping
serialVersionUID: 1L
aliasToWork
LinkedHashMap<K,V> aliasToWork
aliasToFetchWork
LinkedHashMap<K,V> aliasToFetchWork
inputFileChangeSensitive
boolean inputFileChangeSensitive
bucketMapjoinContext
MapredLocalWork.BucketMapJoinContext bucketMapjoinContext
serialVersionUID: 1L
aliasBucketFileNameMapping
LinkedHashMap<K,V> aliasBucketFileNameMapping
mapJoinBigTableAlias
String mapJoinBigTableAlias
bucketMatcherClass
Class<T> bucketMatcherClass
aliasBucketBaseFileNameMapping
LinkedHashMap<K,V> aliasBucketBaseFileNameMapping
bucketFileNameMapping
LinkedHashMap<K,V> bucketFileNameMapping
serialVersionUID: 1L
command
String command
pathToAliases
LinkedHashMap<K,V> pathToAliases
pathToPartitionInfo
LinkedHashMap<K,V> pathToPartitionInfo
aliasToWork
LinkedHashMap<K,V> aliasToWork
aliasToPartnInfo
LinkedHashMap<K,V> aliasToPartnInfo
keyDesc
TableDesc keyDesc
tagToValueDesc
List<E> tagToValueDesc
reducer
Operator<T extends Serializable> reducer
numReduceTasks
Integer numReduceTasks
numMapTasks
Integer numMapTasks
minSplitSize
Integer minSplitSize
needsTagging
boolean needsTagging
hadoopSupportsSplittable
boolean hadoopSupportsSplittable
mapLocalWork
MapredLocalWork mapLocalWork
inputformat
String inputformat
serialVersionUID: 1L
loadTableWork
LoadTableDesc loadTableWork
loadFileWork
LoadFileDesc loadFileWork
checkFileFormat
boolean checkFileFormat
dpSpecPaths
ArrayList<E> dpSpecPaths
inputs
HashSet<E> inputs
- ReadEntitites that are passed to the hooks.
outputs
HashSet<E> outputs
- List of WriteEntities that are passed to the hooks.
movedParts
List<E> movedParts
- List of inserted partitions
tableName
String tableName
partSpecs
ArrayList<E> partSpecs
resFile
String resFile
repairPartitions
boolean repairPartitions
serialVersionUID: 2L
tableDesc
TableDesc tableDesc
partSpec
LinkedHashMap<K,V> partSpec
deserializerClass
Class<T> deserializerClass
inputFileFormatClass
Class<T> inputFileFormatClass
outputFileFormatClass
Class<T> outputFileFormatClass
properties
Properties properties
serdeClassName
String serdeClassName
serialVersionUID: 1L
keyCols
ArrayList<E> keyCols
- Key columns are passed to reducer in the "key".
outputKeyColumnNames
ArrayList<E> outputKeyColumnNames
valueCols
ArrayList<E> valueCols
- Value columns are passed to reducer in the "value".
outputValueColumnNames
ArrayList<E> outputValueColumnNames
keySerializeInfo
TableDesc keySerializeInfo
- Describe how to serialize the key.
valueSerializeInfo
TableDesc valueSerializeInfo
- Describe how to serialize the value.
tag
int tag
- The tag for this reducesink descriptor.
partitionCols
ArrayList<E> partitionCols
- The partition columns (CLUSTER BY or DISTRIBUTE BY in Hive language).
Partition columns decide the reducer that the current row goes to.
Partition columns are not passed to reducer.
numReducers
int numReducers
serialVersionUID: 1L
schema
String schema
serialVersionUID: 1L
scriptCmd
String scriptCmd
scriptOutputInfo
TableDesc scriptOutputInfo
inRecordWriterClass
Class<T> inRecordWriterClass
scriptInputInfo
TableDesc scriptInputInfo
outRecordReaderClass
Class<T> outRecordReaderClass
scriptErrInfo
TableDesc scriptErrInfo
errRecordReaderClass
Class<T> errRecordReaderClass
serialVersionUID: 1L
colList
ArrayList<E> colList
outputColumnNames
ArrayList<E> outputColumnNames
selectStar
boolean selectStar
selStarNoCompute
boolean selStarNoCompute
serialVersionUID: 1L
pattern
String pattern
resFile
String resFile
serialVersionUID: 1L
pattern
String pattern
resFile
String resFile
serialVersionUID: 1L
tabName
String tabName
resFile
String resFile
partSpec
Map<K,V> partSpec
serialVersionUID: 1L
pattern
String pattern
resFile
String resFile
serialVersionUID: 1L
pattern
String pattern
resFile
String resFile
dbName
String dbName
partSpec
HashMap<K,V> partSpec
serialVersionUID: 1L
localWork
MapredLocalWork localWork
tagToAlias
HashMap<K,V> tagToAlias
serialVersionUID: 1L
databaseName
String databaseName
serialVersionUID: 1L
deserializerClass
Class<T> deserializerClass
inputFileFormatClass
Class<T> inputFileFormatClass
outputFileFormatClass
Class<T> outputFileFormatClass
properties
Properties properties
serdeClassName
String serdeClassName
jobProperties
Map<K,V> jobProperties
serialVersionUID: 1L
alias
String alias
Class org.apache.hadoop.hive.ql.plan.TouchDesc extends Object implements Serializable |
serialVersionUID: 1L
genericUDTF
GenericUDTF genericUDTF
serialVersionUID: 1L
Package org.apache.hadoop.hive.ql.udf.generic |
serialVersionUID: 1L
udafEvaluator
Class<T> udafEvaluator
udfName
String udfName
- The name of the UDF.
isOperator
boolean isOperator
- Whether the UDF is an operator or not. This controls how the display string
is generated.
udfClass
Class<T> udfClass
- The underlying UDF class.
Package org.apache.hadoop.hive.ql.util.jdbm.helper |
serialVersionUID: 1L
serialVersionUID: 1L
_nested
Exception _nested
- Nested exception -- the original exception that occurred, if any.
serialVersionUID: 1L
serialVersionUID: 1L
serialVersionUID: 1L
_comparator
Comparator<T> _comparator
- Wrapped comparator.
serialVersionUID: 1L
_except
Exception _except
- The underlying exception.
Package org.apache.hadoop.hive.ql.util.jdbm.recman |
serialVersionUID: 2L
readExternal
public void readExternal(ObjectInput in)
throws IOException,
ClassNotFoundException
- Throws:
IOException
ClassNotFoundException
writeExternal
public void writeExternal(ObjectOutput out)
throws IOException
- Throws:
IOException
Package org.apache.hadoop.hive.serde.test |
Class org.apache.hadoop.hive.serde.test.Constants extends Object implements Serializable |
field0
int field0
__isset
org.apache.hadoop.hive.serde.test.InnerStruct.Isset __isset
field1
int field1
field2
String field2
field3
List<E> field3
__isset
org.apache.hadoop.hive.serde.test.ThriftTestObj.Isset __isset
Package org.apache.hadoop.hive.serde2 |
serialVersionUID: 1L
Package org.apache.hadoop.hive.serde2.dynamic_type |
type_name
String type_name
bt
DynamicSerDeStructBase bt
tios
org.apache.thrift.transport.TIOStreamTransport tios
deserializeReuse
Object deserializeReuse
ret
org.apache.hadoop.io.BytesWritable ret
types_by_id
Map<K,V> types_by_id
types_by_column_name
Map<K,V> types_by_column_name
ordered_types
DynamicSerDeTypeBase[] ordered_types
ordered_column_id_by_name
Map<K,V> ordered_column_id_by_name
isRealThrift
boolean isRealThrift
- Indicates whether fields can be out of order or missing, i.e., whether this is
real Thrift serialization. This is used by DynamicSerDe to do some
optimizations if it knows all the fields exist and are required and are
serialized in order. For now, those optimizations are only done for
DynamicSerDe serialized data, so this is always set to false for now.
fieldsPresent
boolean[] fieldsPresent
field
org.apache.thrift.protocol.TField field
FD_FIELD_LIST
int FD_FIELD_LIST
fieldList
DynamicSerDeFieldList fieldList
serialVersionUID: 1L
FD_KEYTYPE
byte FD_KEYTYPE
FD_VALUETYPE
byte FD_VALUETYPE
serializeMap
org.apache.thrift.protocol.TMap serializeMap
tset
org.apache.thrift.protocol.TSet tset
- NOTE: Set is not supported by Hive yet. The code uses ListObjectInspector
right now. We need to change it to SetObjectInspector when that is done.
specialConstructor
boolean specialConstructor
- This variable determines which constructor was used to create this object
and thereby affects the semantics of the "getMessage" method (see below).
currentToken
Token currentToken
- This is the last token that has been consumed successfully. If this object
has been created due to a parse error, the token following this token will
(therefore) be the first error token.
expectedTokenSequences
int[][] expectedTokenSequences
- Each entry in this array is an array of integers. Each array of integers
represents a sequence of tokens (by their ordinal values) that is expected
at this point of the parse.
tokenImage
String[] tokenImage
- This is a reference to the "tokenImage" array of the generated parser
within which the parse error occurred. This array is defined in the
generated ...Constants interface.
eol
String eol
- The end of line string for this machine.
errorCode
int errorCode
- Indicates the reason why the exception is thrown. It will have one of the
above 4 values.
Package org.apache.hadoop.hive.serde2.thrift.test |
aint
int aint
aString
String aString
lint
List<E> lint
lString
List<E> lString
lintString
List<E> lintString
mStringString
Map<K,V> mStringString
__isset
org.apache.hadoop.hive.serde2.thrift.test.Complex.Isset __isset
myint
int myint
myString
String myString
underscore_int
int underscore_int
__isset
org.apache.hadoop.hive.serde2.thrift.test.IntString.Isset __isset
Package org.apache.hadoop.hive.serde2.typeinfo |
serialVersionUID: 1L
listElementTypeInfo
TypeInfo listElementTypeInfo
serialVersionUID: 1L
mapKeyTypeInfo
TypeInfo mapKeyTypeInfo
mapValueTypeInfo
TypeInfo mapValueTypeInfo
serialVersionUID: 1L
typeName
String typeName
serialVersionUID: 1L
allStructFieldNames
ArrayList<E> allStructFieldNames
allStructFieldTypeInfos
ArrayList<E> allStructFieldTypeInfos
serialVersionUID: 1L
Package org.apache.hadoop.hive.service |
taskTrackers
int taskTrackers
mapTasks
int mapTasks
reduceTasks
int reduceTasks
maxMapTasks
int maxMapTasks
maxReduceTasks
int maxReduceTasks
state
int state
__isset
org.apache.hadoop.hive.service.HiveClusterStatus.Isset __isset
message
String message
errorCode
int errorCode
SQLState
String SQLState
__isset
org.apache.hadoop.hive.service.HiveServerException.Isset __isset
query
String query
__isset
org.apache.hadoop.hive.service.ThriftHive.execute_args.Isset __isset
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.execute_result.Isset __isset
success
List<E> success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.fetchAll_result.Isset __isset
numRows
int numRows
__isset
org.apache.hadoop.hive.service.ThriftHive.fetchN_args.Isset __isset
success
List<E> success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.fetchN_result.Isset __isset
success
String success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.fetchOne_result.Isset __isset
success
HiveClusterStatus success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.getClusterStatus_result.Isset __isset
success
org.apache.hadoop.hive.ql.plan.api.QueryPlan success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.getQueryPlan_result.Isset __isset
success
Schema success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.getSchema_result.Isset __isset
success
Schema success
ex
HiveServerException ex
__isset
org.apache.hadoop.hive.service.ThriftHive.getThriftSchema_result.Isset __isset
Copyright © 2010 The Apache Software Foundation