|
||||||||||
PREV CLASS NEXT CLASS | FRAMES NO FRAMES | |||||||||
SUMMARY: NESTED | FIELD | CONSTR | METHOD | DETAIL: FIELD | CONSTR | METHOD |
java.lang.Object → org.apache.hadoop.hive.ql.exec.Utilities
public final class Utilities
Utilities.
Nested Class Summary | |
---|---|
static class |
Utilities.CollectionPersistenceDelegate
|
static class |
Utilities.EnumDelegate
Java 1.5 workaround. |
static class |
Utilities.ListDelegate
|
static class |
Utilities.MapDelegate
|
static class |
Utilities.ReduceField
ReduceField. |
static class |
Utilities.SetDelegate
|
static class |
Utilities.StreamPrinter
StreamPrinter. |
static class |
Utilities.StreamStatus
StreamStatus. |
static class |
Utilities.Tuple<T,V>
Tuple. |
Field Summary | |
---|---|
static int |
ctrlaCode
|
static TableDesc |
defaultTd
|
static String |
HADOOP_LOCAL_FS
The objects in the reducer are composed of these top-level fields. |
static String |
INDENT
|
static int |
newLineCode
|
static String |
NSTR
|
static String |
nullStringOutput
|
static String |
nullStringStorage
|
static Random |
randGen
|
static int |
tabCode
|
Method Summary | |
---|---|
static String |
abbreviate(String str,
int max)
Convert "From src insert blah blah" to "From src insert ... |
static void |
addMapWork(MapredWork mr,
Table tbl,
String alias,
Operator<?> work)
|
static ClassLoader |
addToClassPath(ClassLoader cloader,
String[] newPaths)
Add new elements to the classpath. |
static void |
clearMapRedWork(org.apache.hadoop.conf.Configuration job)
|
static boolean |
contentsEqual(InputStream is1,
InputStream is2,
boolean ignoreWhitespace)
|
static void |
copyTableJobPropertiesToConf(TableDesc tbl,
org.apache.hadoop.mapred.JobConf job)
Copies the storage handler properties configured for a table descriptor to a runtime job configuration. |
static OutputStream |
createCompressedStream(org.apache.hadoop.mapred.JobConf jc,
OutputStream out)
Convert an output stream to a compressed output stream based on codecs and compression options specified in the Job Configuration. |
static OutputStream |
createCompressedStream(org.apache.hadoop.mapred.JobConf jc,
OutputStream out,
boolean isCompressed)
Convert an output stream to a compressed output stream based on codecs in the Job Configuration. |
static RCFile.Writer |
createRCFileWriter(org.apache.hadoop.mapred.JobConf jc,
org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path file,
boolean isCompressed)
Create an RCFile output stream based on job configuration. Uses a user-supplied compression flag (rather than obtaining it from the Job Configuration). |
static org.apache.hadoop.io.SequenceFile.Writer |
createSequenceWriter(org.apache.hadoop.mapred.JobConf jc,
org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path file,
Class<?> keyClass,
Class<?> valClass)
Create a sequencefile output stream based on job configuration. |
static org.apache.hadoop.io.SequenceFile.Writer |
createSequenceWriter(org.apache.hadoop.mapred.JobConf jc,
org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path file,
Class<?> keyClass,
Class<?> valClass,
boolean isCompressed)
Create a sequencefile output stream based on job configuration Uses user supplied compression flag (rather than obtaining it from the Job Configuration). |
static MapredWork |
deserializeMapRedWork(InputStream in,
org.apache.hadoop.conf.Configuration conf)
|
static QueryPlan |
deserializeQueryPlan(InputStream in,
org.apache.hadoop.conf.Configuration conf)
Deserialize the whole query plan. |
static String |
formatBinaryString(byte[] array,
int start,
int length)
|
static List<String> |
getColumnNames(Properties props)
|
static List<String> |
getColumnNamesFromFieldSchema(List<FieldSchema> partCols)
|
static List<String> |
getColumnNamesFromSortCols(List<Order> sortCols)
|
static List<String> |
getColumnTypes(Properties props)
|
static int |
getDefaultNotificationInterval(org.apache.hadoop.conf.Configuration hconf)
Gets the default notification interval to send progress updates to the tracker. |
static List<String> |
getFieldSchemaString(List<FieldSchema> fl)
|
static String |
getFileExtension(org.apache.hadoop.mapred.JobConf jc,
boolean isCompressed)
Based on compression option and configured output codec - get extension for output file. |
static org.apache.hadoop.fs.FileStatus[] |
getFileStatusRecurse(org.apache.hadoop.fs.Path path,
int level,
org.apache.hadoop.fs.FileSystem fs)
Get all file status from a root path and recursively go deep into certain levels. |
static MapredWork |
getMapRedWork(org.apache.hadoop.conf.Configuration job)
|
static String |
getNameMessage(Exception e)
|
static String |
getOpTreeSkel(Operator<?> op)
|
static PartitionDesc |
getPartitionDesc(Partition part)
|
static TableDesc |
getTableDesc(String cols,
String colTypes)
|
static TableDesc |
getTableDesc(Table tbl)
|
static String |
getTaskId(org.apache.hadoop.conf.Configuration hconf)
Gets the task id if we are running as a Hadoop job. |
static String |
getTaskIdFromFilename(String filename)
Get the task id from the filename. |
static boolean |
isTempPath(org.apache.hadoop.fs.FileStatus file)
Detect if the supplied file is a temporary path. |
static ArrayList |
makeList(Object... olist)
|
static HashMap |
makeMap(Object... olist)
|
static Properties |
makeProperties(String... olist)
|
static List<String> |
mergeUniqElems(List<String> src,
List<String> dest)
|
static Utilities.StreamStatus |
readColumn(DataInput in,
OutputStream out)
|
static String |
realFile(String newFile,
org.apache.hadoop.conf.Configuration conf)
Shamelessly cloned from GenericOptionsParser. |
static void |
removeFromClassPath(String[] pathsToRemove)
Remove elements from the classpath. |
static HashMap<String,org.apache.hadoop.fs.FileStatus> |
removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileStatus[] items,
org.apache.hadoop.fs.FileSystem fs)
|
static void |
removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path path)
Remove all temporary files and duplicate (double-committed) files from a given directory. |
static ArrayList<String> |
removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path path,
DynamicPartitionCtx dpCtx)
Remove all temporary files and duplicate (double-committed) files from a given directory. |
static void |
rename(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path src,
org.apache.hadoop.fs.Path dst)
Rename src to dst, or in the case dst already exists, move files in src to dst. |
static void |
renameOrMoveFiles(org.apache.hadoop.fs.FileSystem fs,
org.apache.hadoop.fs.Path src,
org.apache.hadoop.fs.Path dst)
Rename src to dst, or in the case dst already exists, move files in src to dst. |
static String |
replaceTaskId(String taskId,
int bucketNum)
|
static String |
replaceTaskIdFromFilename(String filename,
int bucketNum)
Replace the task id from the filename. |
static String |
replaceTaskIdFromFilename(String filename,
String oldTaskId,
String newTaskId)
Replace the oldTaskId appearing in the filename by the newTaskId. |
static void |
serializeMapRedWork(MapredWork w,
OutputStream out)
Serialize the mapredWork object to an output stream. |
static void |
serializeQueryPlan(QueryPlan plan,
OutputStream out)
Serialize the whole query plan. |
static void |
serializeTasks(Task<? extends Serializable> t,
OutputStream out)
Serialize a single Task. |
static void |
setMapRedWork(org.apache.hadoop.conf.Configuration job,
MapredWork w,
String hiveScratchDir)
|
static boolean |
supportCombineFileInputFormat()
|
static org.apache.hadoop.fs.Path |
toTempPath(org.apache.hadoop.fs.Path orig)
|
static org.apache.hadoop.fs.Path |
toTempPath(String orig)
Given a path, convert to a temporary path. |
static void |
validateColumnNames(List<String> colNames,
List<String> checkCols)
|
Methods inherited from class java.lang.Object |
---|
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait |
Field Detail |
---|
public static String HADOOP_LOCAL_FS
public static TableDesc defaultTd
public static final int newLineCode
public static final int tabCode
public static final int ctrlaCode
public static final String INDENT
public static String nullStringStorage
public static String nullStringOutput
public static Random randGen
public static final String NSTR
Method Detail |
---|
public static void clearMapRedWork(org.apache.hadoop.conf.Configuration job)
public static MapredWork getMapRedWork(org.apache.hadoop.conf.Configuration job)
public static List<String> getFieldSchemaString(List<FieldSchema> fl)
public static void setMapRedWork(org.apache.hadoop.conf.Configuration job, MapredWork w, String hiveScratchDir)
public static void serializeTasks(Task<? extends Serializable> t, OutputStream out)
public static void serializeQueryPlan(QueryPlan plan, OutputStream out)
public static QueryPlan deserializeQueryPlan(InputStream in, org.apache.hadoop.conf.Configuration conf)
public static void serializeMapRedWork(MapredWork w, OutputStream out)
public static MapredWork deserializeMapRedWork(InputStream in, org.apache.hadoop.conf.Configuration conf)
public static String getTaskId(org.apache.hadoop.conf.Configuration hconf)
public static HashMap makeMap(Object... olist)
public static Properties makeProperties(String... olist)
public static ArrayList makeList(Object... olist)
public static TableDesc getTableDesc(Table tbl)
public static TableDesc getTableDesc(String cols, String colTypes)
public static PartitionDesc getPartitionDesc(Partition part) throws HiveException
HiveException
public static void addMapWork(MapredWork mr, Table tbl, String alias, Operator<?> work)
public static String getOpTreeSkel(Operator<?> op)
public static boolean contentsEqual(InputStream is1, InputStream is2, boolean ignoreWhitespace) throws IOException
IOException
public static String abbreviate(String str, int max)
public static Utilities.StreamStatus readColumn(DataInput in, OutputStream out) throws IOException
IOException
public static OutputStream createCompressedStream(org.apache.hadoop.mapred.JobConf jc, OutputStream out) throws IOException
jc - Job Configuration
out - Output Stream to be converted into compressed output stream
IOException
public static OutputStream createCompressedStream(org.apache.hadoop.mapred.JobConf jc, OutputStream out, boolean isCompressed) throws IOException
jc - Job Configuration
out - Output Stream to be converted into compressed output stream
isCompressed - whether the output stream needs to be compressed or not
IOException
public static String getFileExtension(org.apache.hadoop.mapred.JobConf jc, boolean isCompressed)
jc - Job Configuration
isCompressed - Whether the output file is compressed or not
public static org.apache.hadoop.io.SequenceFile.Writer createSequenceWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, Class<?> keyClass, Class<?> valClass) throws IOException
jc - Job configuration
fs - File System to create file in
file - Path to be created
keyClass - Java Class for key
valClass - Java Class for value
IOException
public static org.apache.hadoop.io.SequenceFile.Writer createSequenceWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, Class<?> keyClass, Class<?> valClass, boolean isCompressed) throws IOException
jc - Job configuration
fs - File System to create file in
file - Path to be created
keyClass - Java Class for key
valClass - Java Class for value
IOException
public static RCFile.Writer createRCFileWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, boolean isCompressed) throws IOException
jc - Job configuration
fs - File System to create file in
file - Path to be created
IOException
public static String realFile(String newFile, org.apache.hadoop.conf.Configuration conf) throws IOException
IOException
public static List<String> mergeUniqElems(List<String> src, List<String> dest)
public static org.apache.hadoop.fs.Path toTempPath(org.apache.hadoop.fs.Path orig)
public static org.apache.hadoop.fs.Path toTempPath(String orig)
public static boolean isTempPath(org.apache.hadoop.fs.FileStatus file)
public static void rename(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path src, org.apache.hadoop.fs.Path dst) throws IOException, HiveException
fs - the FileSystem where src and dst are on.
src - the src directory
dst - the target directory
IOException
HiveException
public static void renameOrMoveFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path src, org.apache.hadoop.fs.Path dst) throws IOException, HiveException
fs - the FileSystem where src and dst are on.
src - the src directory
dst - the target directory
IOException
HiveException
public static String getTaskIdFromFilename(String filename)
public static String replaceTaskIdFromFilename(String filename, int bucketNum)
public static String replaceTaskId(String taskId, int bucketNum)
public static String replaceTaskIdFromFilename(String filename, String oldTaskId, String newTaskId)
filename -
oldTaskId -
newTaskId -
public static org.apache.hadoop.fs.FileStatus[] getFileStatusRecurse(org.apache.hadoop.fs.Path path, int level, org.apache.hadoop.fs.FileSystem fs) throws IOException
path - the root path
level - the depth of directories to explore
fs - the file system
IOException
public static void removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path) throws IOException
IOException
public static ArrayList<String> removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path, DynamicPartitionCtx dpCtx) throws IOException
IOException
public static HashMap<String,org.apache.hadoop.fs.FileStatus> removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileStatus[] items, org.apache.hadoop.fs.FileSystem fs) throws IOException
IOException
public static String getNameMessage(Exception e)
public static ClassLoader addToClassPath(ClassLoader cloader, String[] newPaths) throws Exception
newPaths - Array of classpath elements
Exception
public static void removeFromClassPath(String[] pathsToRemove) throws Exception
pathsToRemove - Array of classpath elements
Exception
public static String formatBinaryString(byte[] array, int start, int length)
public static List<String> getColumnNamesFromSortCols(List<Order> sortCols)
public static List<String> getColumnNamesFromFieldSchema(List<FieldSchema> partCols)
public static List<String> getColumnNames(Properties props)
public static List<String> getColumnTypes(Properties props)
public static void validateColumnNames(List<String> colNames, List<String> checkCols) throws SemanticException
SemanticException
public static int getDefaultNotificationInterval(org.apache.hadoop.conf.Configuration hconf)
hconf -
public static void copyTableJobPropertiesToConf(TableDesc tbl, org.apache.hadoop.mapred.JobConf job)
tbl - table descriptor from which to read
job - configuration which receives configured properties
public static boolean supportCombineFileInputFormat()
|
||||||||||
PREV CLASS NEXT CLASS | FRAMES NO FRAMES | |||||||||
SUMMARY: NESTED | FIELD | CONSTR | METHOD | DETAIL: FIELD | CONSTR | METHOD |