| Packages that use TableDesc | |
|---|---|
| org.apache.hadoop.hive.ql.exec | Hive QL execution tasks, operators, functions and other handlers. |
| org.apache.hadoop.hive.ql.exec.persistence | |
| org.apache.hadoop.hive.ql.io | |
| org.apache.hadoop.hive.ql.metadata | |
| org.apache.hadoop.hive.ql.optimizer | |
| org.apache.hadoop.hive.ql.plan | |

Uses of TableDesc in org.apache.hadoop.hive.ql.exec
---

| Fields in org.apache.hadoop.hive.ql.exec declared as TableDesc | |
|---|---|
| static TableDesc | Utilities.defaultTd |

| Fields in org.apache.hadoop.hive.ql.exec with type parameters of type TableDesc | |
|---|---|
| protected Map<Byte,TableDesc> | CommonJoinOperator.spillTableDesc |

| Methods in org.apache.hadoop.hive.ql.exec that return TableDesc | |
|---|---|
| TableDesc | FetchOperator.getCurrTbl() |
| TableDesc | CommonJoinOperator.getSpillTableDesc(Byte alias) |
| static TableDesc | Utilities.getTableDesc(String cols, String colTypes) |
| static TableDesc | Utilities.getTableDesc(Table tbl) |
| TableDesc | FetchTask.getTblDesc() Return the tableDesc of the fetchWork. |
| TableDesc | MapJoinOperator.MapJoinObjectCtx.getTblDesc() |

| Methods in org.apache.hadoop.hive.ql.exec that return types with arguments of type TableDesc | |
|---|---|
| Map<Byte,TableDesc> | CommonJoinOperator.getSpillTableDesc() |

| Methods in org.apache.hadoop.hive.ql.exec with parameters of type TableDesc | |
|---|---|
| static void | Utilities.copyTableJobPropertiesToConf(TableDesc tbl, org.apache.hadoop.mapred.JobConf job) Copies the storage handler properties configured for a table descriptor to a runtime job configuration. |
| void | FetchOperator.setCurrTbl(TableDesc currTbl) |

| Constructors in org.apache.hadoop.hive.ql.exec with parameters of type TableDesc |
|---|
| MapJoinOperator.MapJoinObjectCtx(ObjectInspector standardOI, SerDe serde, TableDesc tblDesc, org.apache.hadoop.conf.Configuration conf) |

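As a minimal sketch of how the two Utilities helpers above might be combined: build a descriptor from column names and types, then copy its table-level job properties onto a JobConf before task submission. The column names, types, and separator conventions below are illustrative assumptions, not values from this listing.

```java
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.mapred.JobConf;

public class TableDescConfSketch {
  public static void main(String[] args) throws Exception {
    // Build a descriptor from comma-separated column names and
    // colon-separated column types (illustrative values).
    TableDesc td = Utilities.getTableDesc("key,value", "string:int");

    // Copy any table-level job properties onto the JobConf so that
    // map/reduce tasks can see them at runtime.
    JobConf job = new JobConf();
    Utilities.copyTableJobPropertiesToConf(td, job);
  }
}
```
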
Uses of TableDesc in org.apache.hadoop.hive.ql.exec.persistence
---

| Methods in org.apache.hadoop.hive.ql.exec.persistence with parameters of type TableDesc | |
|---|---|
| void | RowContainer.setTableDesc(TableDesc tblDesc) |

Uses of TableDesc in org.apache.hadoop.hive.ql.io
---

| Methods in org.apache.hadoop.hive.ql.io with parameters of type TableDesc | |
|---|---|
| static FileSinkOperator.RecordWriter | HiveFileFormatUtils.getHiveRecordWriter(org.apache.hadoop.mapred.JobConf jc, TableDesc tableInfo, Class<? extends org.apache.hadoop.io.Writable> outputClass, FileSinkDesc conf, org.apache.hadoop.fs.Path outPath) |

Uses of TableDesc in org.apache.hadoop.hive.ql.metadata
---

| Methods in org.apache.hadoop.hive.ql.metadata with parameters of type TableDesc | |
|---|---|
| void | HiveStorageHandler.configureTableJobProperties(TableDesc tableDesc, Map<String,String> jobProperties) Configures properties for a job based on the definition of the source or target table it accesses. |
| void | DefaultStorageHandler.configureTableJobProperties(TableDesc tableDesc, Map<String,String> jobProperties) |

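As a rough illustration of the hook above, a custom storage handler can override configureTableJobProperties to push table properties into the job. The handler class, the property key, and the use of getProperties() on the descriptor are assumptions for this sketch, not part of the listing.

```java
import java.util.Map;
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.ql.plan.TableDesc;

// Hypothetical handler: copies one table property into the job properties
// so that tasks reading or writing the table can see it at runtime.
public class ExampleStorageHandler extends DefaultStorageHandler {
  @Override
  public void configureTableJobProperties(TableDesc tableDesc,
                                          Map<String, String> jobProperties) {
    // Assumes TableDesc exposes its table properties via getProperties();
    // the property key is made up for illustration.
    String location =
        tableDesc.getProperties().getProperty("example.external.location");
    if (location != null) {
      jobProperties.put("example.external.location", location);
    }
  }
}
```
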
Uses of TableDesc in org.apache.hadoop.hive.ql.optimizer
---

| Methods in org.apache.hadoop.hive.ql.optimizer that return TableDesc | |
|---|---|
| TableDesc | GenMRProcContext.GenMRMapJoinCtx.getTTDesc() |

| Methods in org.apache.hadoop.hive.ql.optimizer that return types with arguments of type TableDesc | |
|---|---|
| List<TableDesc> | GenMRProcContext.GenMRUnionCtx.getTTDesc() |

| Methods in org.apache.hadoop.hive.ql.optimizer with parameters of type TableDesc | |
|---|---|
| void | GenMRProcContext.GenMRUnionCtx.addTTDesc(TableDesc tt_desc) |
| static void | GenMapRedUtils.setTaskPlan(String path, String alias, Operator<? extends Serializable> topOp, MapredWork plan, boolean local, TableDesc tt_desc) Set the current task in the mapredWork. |
| void | GenMRProcContext.GenMRMapJoinCtx.setTTDesc(TableDesc tt_desc) |

| Constructors in org.apache.hadoop.hive.ql.optimizer with parameters of type TableDesc |
|---|
| GenMRProcContext.GenMRMapJoinCtx(String taskTmpDir, TableDesc tt_desc, Operator<? extends Serializable> rootMapJoinOp, AbstractMapJoinOperator<? extends MapJoinDesc> oldMapJoin) |

Uses of TableDesc in org.apache.hadoop.hive.ql.plan
---

| Methods in org.apache.hadoop.hive.ql.plan that return TableDesc | |
|---|---|
| static TableDesc | PlanUtils.getDefaultTableDesc(String separatorCode) Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode. |
| static TableDesc | PlanUtils.getDefaultTableDesc(String separatorCode, String columns) Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode and column names (comma separated string). |
| static TableDesc | PlanUtils.getDefaultTableDesc(String separatorCode, String columns, boolean lastColumnTakesRestOfTheLine) Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode and column names (comma separated string), and whether the last column should take the rest of the line. |
| static TableDesc | PlanUtils.getDefaultTableDesc(String separatorCode, String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine) Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode and column names (comma separated string), and whether the last column should take the rest of the line. |
| static TableDesc | PlanUtils.getIntermediateFileTableDesc(List<FieldSchema> fieldSchemas) Generate the table descriptor for intermediate files. |
| TableDesc | MapredWork.getKeyDesc() |
| TableDesc | ReduceSinkDesc.getKeySerializeInfo() |
| TableDesc | JoinDesc.getKeyTableDesc() |
| TableDesc | MapJoinDesc.getKeyTblDesc() |
| static TableDesc | PlanUtils.getMapJoinKeyTableDesc(List<FieldSchema> fieldSchemas) Generate the table descriptor for the Map-side join key. |
| static TableDesc | PlanUtils.getMapJoinValueTableDesc(List<FieldSchema> fieldSchemas) Generate the table descriptor for the Map-side join value. |
| static TableDesc | PlanUtils.getReduceKeyTableDesc(List<FieldSchema> fieldSchemas, String order) Generate the table descriptor for the reduce key. |
| static TableDesc | PlanUtils.getReduceValueTableDesc(List<FieldSchema> fieldSchemas) Generate the table descriptor for intermediate files. |
| TableDesc | ScriptDesc.getScriptErrInfo() |
| TableDesc | ScriptDesc.getScriptInputInfo() |
| TableDesc | ScriptDesc.getScriptOutputInfo() |
| TableDesc | LoadTableDesc.getTable() |
| TableDesc | PartitionDesc.getTableDesc() |
| static TableDesc | PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass, String separatorCode, String columns) Generate the table descriptor of the given serde with the separatorCode and column names (comma separated string). |
| static TableDesc | PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass, String separatorCode, String columns, boolean lastColumnTakesRestOfTheLine) Generate the table descriptor of the specified serde with the separatorCode and column names (comma separated string), and whether the last column should take the rest of the line. |
| static TableDesc | PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass, String separatorCode, String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine) |
| static TableDesc | PlanUtils.getTableDesc(Class<? extends Deserializer> serdeClass, String separatorCode, String columns, String columnTypes, boolean lastColumnTakesRestOfTheLine, boolean useJSONForLazy) |
| static TableDesc | PlanUtils.getTableDesc(CreateTableDesc crtTblDesc, String cols, String colTypes) Generate a table descriptor from a createTableDesc. |
| TableDesc | FileSinkDesc.getTableInfo() |
| TableDesc | FetchWork.getTblDesc() |
| TableDesc | ReduceSinkDesc.getValueSerializeInfo() |

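A brief sketch of the PlanUtils factories listed above, under the assumption that separatorCode is passed as a string containing the delimiter's ASCII code and that columns are comma separated; the tab delimiter, column names, and the getProperties() call are illustrative only.

```java
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.TableDesc;

public class PlanUtilsSketch {
  public static void main(String[] args) {
    // Descriptor for a two-column, tab-delimited row format
    // (separator code "9" = ASCII tab); values are illustrative.
    TableDesc td = PlanUtils.getDefaultTableDesc("9", "key,value");

    // Assumes the generated descriptor carries its SerDe and format
    // settings as table properties exposed via getProperties().
    System.out.println(td.getProperties());
  }
}
```
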
| Methods in org.apache.hadoop.hive.ql.plan that return types with arguments of type TableDesc | |
|---|---|
| Map<Byte,TableDesc> | JoinDesc.getSkewKeysValuesTables() |
| List<TableDesc> | MapredWork.getTagToValueDesc() |
| List<TableDesc> | MapJoinDesc.getValueTblDescs() |

| Methods in org.apache.hadoop.hive.ql.plan with parameters of type TableDesc | |
|---|---|
| static void | PlanUtils.configureTableJobPropertiesForStorageHandler(TableDesc tableDesc) Loads the storage handler (if one exists) for the given table and invokes HiveStorageHandler.configureTableJobProperties(org.apache.hadoop.hive.ql.plan.TableDesc, java.util.Map). |
| void | MapredWork.setKeyDesc(TableDesc keyDesc) |
| void | ReduceSinkDesc.setKeySerializeInfo(TableDesc keySerializeInfo) |
| void | JoinDesc.setKeyTableDesc(TableDesc keyTblDesc) |
| void | MapJoinDesc.setKeyTblDesc(TableDesc keyTblDesc) |
| void | ScriptDesc.setScriptErrInfo(TableDesc scriptErrInfo) |
| void | ScriptDesc.setScriptInputInfo(TableDesc scriptInputInfo) |
| void | ScriptDesc.setScriptOutputInfo(TableDesc scriptOutputInfo) |
| void | LoadTableDesc.setTable(TableDesc table) |
| void | PartitionDesc.setTableDesc(TableDesc tableDesc) |
| void | FileSinkDesc.setTableInfo(TableDesc tableInfo) |
| void | FetchWork.setTblDesc(TableDesc tblDesc) |
| void | ReduceSinkDesc.setValueSerializeInfo(TableDesc valueSerializeInfo) |

| Method parameters in org.apache.hadoop.hive.ql.plan with type arguments of type TableDesc | |
|---|---|
| void | JoinDesc.setSkewKeysValuesTables(Map<Byte,TableDesc> skewKeysValuesTables) |
| void | MapredWork.setTagToValueDesc(List<TableDesc> tagToValueDesc) |
| void | MapJoinDesc.setValueTblDescs(List<TableDesc> valueTblDescs) |

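A minimal sketch of the plan-side hook described above: the call resolves the table's storage handler, if one is declared in the descriptor's properties, and lets it add job properties to the descriptor before the plan is serialized. The descriptor values reuse the illustrative assumptions from the earlier sketch.

```java
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.TableDesc;

public class StorageHandlerPlanSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative descriptor; a real plan would use the target table's
    // own descriptor, typically carrying a storage handler property.
    TableDesc td = PlanUtils.getDefaultTableDesc("9", "key,value");

    // Per the listing, this loads the storage handler (if any) and lets it
    // configure the descriptor's job properties; otherwise it has no effect.
    PlanUtils.configureTableJobPropertiesForStorageHandler(td);
  }
}
```
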
| Constructors in org.apache.hadoop.hive.ql.plan with parameters of type TableDesc |
|---|
| FetchWork(String tblDir, TableDesc tblDesc) |
| FetchWork(String tblDir, TableDesc tblDesc, int limit) |
| FileSinkDesc(String dirName, TableDesc tableInfo, boolean compressed) |
| FileSinkDesc(String dirName, TableDesc tableInfo, boolean compressed, int destTableId, boolean multiFileSpray, int numFiles, int totalFiles, ArrayList<ExprNodeDesc> partitionCols, DynamicPartitionCtx dpCtx) |
| LoadTableDesc(String sourceDir, String tmpDir, TableDesc table, DynamicPartitionCtx dpCtx) |
| LoadTableDesc(String sourceDir, String tmpDir, TableDesc table, Map<String,String> partitionSpec) |
| LoadTableDesc(String sourceDir, String tmpDir, TableDesc table, Map<String,String> partitionSpec, boolean replace) |
| MapJoinDesc(Map<Byte,List<ExprNodeDesc>> keys, TableDesc keyTblDesc, Map<Byte,List<ExprNodeDesc>> values, List<TableDesc> valueTblDescs, List<String> outputColumnNames, int posBigTable, JoinCondDesc[] conds) |
| MapredWork(String command, LinkedHashMap<String,ArrayList<String>> pathToAliases, LinkedHashMap<String,PartitionDesc> pathToPartitionInfo, LinkedHashMap<String,Operator<? extends Serializable>> aliasToWork, TableDesc keyDesc, List<TableDesc> tagToValueDesc, Operator<?> reducer, Integer numReduceTasks, MapredLocalWork mapLocalWork, boolean hadoopSupportsSplittable) |
| PartitionDesc(TableDesc table, LinkedHashMap<String,String> partSpec) |
| PartitionDesc(TableDesc table, LinkedHashMap<String,String> partSpec, Class<? extends Deserializer> serdeClass, Class<? extends org.apache.hadoop.mapred.InputFormat> inputFileFormatClass, Class<?> outputFormat, Properties properties, String serdeClassName) |
| ReduceSinkDesc(ArrayList<ExprNodeDesc> keyCols, ArrayList<ExprNodeDesc> valueCols, ArrayList<String> outputKeyColumnNames, ArrayList<String> outputValueColumnNames, int tag, ArrayList<ExprNodeDesc> partitionCols, int numReducers, TableDesc keySerializeInfo, TableDesc valueSerializeInfo) |
| ScriptDesc(String scriptCmd, TableDesc scriptInputInfo, Class<? extends RecordWriter> inRecordWriterClass, TableDesc scriptOutputInfo, Class<? extends RecordReader> outRecordReaderClass, Class<? extends RecordReader> errRecordReaderClass, TableDesc scriptErrInfo) |

| Constructor parameters in org.apache.hadoop.hive.ql.plan with type arguments of type TableDesc |
|---|
| MapJoinDesc(Map<Byte,List<ExprNodeDesc>> keys, TableDesc keyTblDesc, Map<Byte,List<ExprNodeDesc>> values, List<TableDesc> valueTblDescs, List<String> outputColumnNames, int posBigTable, JoinCondDesc[] conds) |
| MapredWork(String command, LinkedHashMap<String,ArrayList<String>> pathToAliases, LinkedHashMap<String,PartitionDesc> pathToPartitionInfo, LinkedHashMap<String,Operator<? extends Serializable>> aliasToWork, TableDesc keyDesc, List<TableDesc> tagToValueDesc, Operator<?> reducer, Integer numReduceTasks, MapredLocalWork mapLocalWork, boolean hadoopSupportsSplittable) |