java.lang.Object
  org.apache.hadoop.hive.ql.exec.Utilities
public class Utilities
Nested Class Summary
Modifier and Type | Class | Description
---|---|---
static class | Utilities.EnumDelegate | Java 1.5 workaround.
static class | Utilities.ReduceField | The objects in the reducer are composed of these top-level fields.
static class | Utilities.StreamPrinter |
static class | Utilities.streamStatus |
static class | Utilities.Tuple<T,V> |
Field Summary
Modifier and Type | Field
---|---
static int | ctrlaCode
static tableDesc | defaultTd
static String | INDENT
static int | newLineCode
static String | NSTR
static String | nullStringOutput
static String | nullStringStorage
static Random | randGen
static int | tabCode
Constructor Summary
Constructor
---
Utilities()
Method Summary
Modifier and Type | Method | Description
---|---|---
static String | abbreviate(String str, int max) | convert "From src insert blah blah" to "From src insert ...
static void | addMapWork(mapredWork mr, Table tbl, String alias, Operator<?> work) |
static ClassLoader | addToClassPath(ClassLoader cloader, String[] newPaths) | Add new elements to the classpath.
static void | clearMapRedWork(org.apache.hadoop.conf.Configuration job) |
static boolean | contentsEqual(InputStream is1, InputStream is2, boolean ignoreWhitespace) |
static OutputStream | createCompressedStream(org.apache.hadoop.mapred.JobConf jc, OutputStream out) | Convert an output stream to a compressed output stream based on codecs and compression options specified in the Job Configuration.
static OutputStream | createCompressedStream(org.apache.hadoop.mapred.JobConf jc, OutputStream out, boolean isCompressed) | Convert an output stream to a compressed output stream based on codecs in the Job Configuration.
static RCFile.Writer | createRCFileWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, boolean isCompressed) | Create an RCFile output stream based on the job configuration. Uses the user-supplied compression flag (rather than obtaining it from the Job Configuration).
static org.apache.hadoop.io.SequenceFile.Writer | createSequenceWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, Class<?> keyClass, Class<?> valClass) | Create a SequenceFile output stream based on the job configuration.
static org.apache.hadoop.io.SequenceFile.Writer | createSequenceWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, Class<?> keyClass, Class<?> valClass, boolean isCompressed) | Create a SequenceFile output stream based on the job configuration. Uses the user-supplied compression flag (rather than obtaining it from the Job Configuration).
static mapredWork | deserializeMapRedWork(InputStream in, org.apache.hadoop.conf.Configuration conf) |
static String | formatBinaryString(byte[] array, int start, int length) |
static List<String> | getColumnNames(Properties props) |
static List<String> | getColumnNamesFromFieldSchema(List<FieldSchema> partCols) |
static List<String> | getColumnNamesFromSortCols(List<Order> sortCols) |
static int | getDefaultNotificationInterval(org.apache.hadoop.conf.Configuration hconf) | Gets the default notification interval to send progress updates to the tracker.
static List<String> | getFieldSchemaString(List<FieldSchema> fl) |
static String | getFileExtension(org.apache.hadoop.mapred.JobConf jc, boolean isCompressed) | Based on the compression option and the configured output codec, get the extension for the output file.
static String | getJobName(org.apache.hadoop.conf.Configuration job) |
static mapredWork | getMapRedWork(org.apache.hadoop.conf.Configuration job) |
static String | getNameMessage(Exception e) |
static String | getOpTreeSkel(Operator<?> op) |
static partitionDesc | getPartitionDesc(Partition part) |
static tableDesc | getTableDesc(Table tbl) |
static String | getTaskId(org.apache.hadoop.conf.Configuration hconf) | Gets the task id if we are running as a Hadoop job.
static String | getTaskIdFromFilename(String filename) | Get the task id from the filename.
static boolean | isTempPath(org.apache.hadoop.fs.FileStatus file) | Detect whether the supplied file is a temporary path.
static ArrayList | makeList(Object... olist) |
static HashMap | makeMap(Object... olist) |
static Properties | makeProperties(String... olist) |
static List<String> | mergeUniqElems(List<String> src, List<String> dest) |
static Utilities.streamStatus | readColumn(DataInput in, OutputStream out) |
static String | realFile(String newFile, org.apache.hadoop.conf.Configuration conf) | Shamelessly cloned from GenericOptionsParser.
static void | removeFromClassPath(String[] pathsToRemove) | Remove elements from the classpath.
static void | removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path) | Remove all temporary files and duplicate (double-committed) files from a given directory.
static void | rename(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path src, org.apache.hadoop.fs.Path dst) | Rename src to dst, or, if dst already exists, move the files in src to dst.
static void | renameOrMoveFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path src, org.apache.hadoop.fs.Path dst) | Rename src to dst, or, if dst already exists, move the files in src to dst.
static int | sanitizedJobId(org.apache.hadoop.conf.Configuration job) | Returns a unique ID for the job.
static void | serializeMapRedWork(mapredWork w, OutputStream out) | Serialize the plan object to an output stream.
static void | serializeTasks(Task<? extends Serializable> t, OutputStream out) |
static void | setMapRedWork(org.apache.hadoop.conf.Configuration job, mapredWork w) |
static org.apache.hadoop.fs.Path | toTempPath(org.apache.hadoop.fs.Path orig) |
static org.apache.hadoop.fs.Path | toTempPath(String orig) | Given a path, convert it to a temporary path.
static void | validateColumnNames(List<String> colNames, List<String> checkCols) |
Methods inherited from class java.lang.Object
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Field Detail
public static tableDesc defaultTd
public static final int newLineCode
public static final int tabCode
public static final int ctrlaCode
public static final String INDENT
public static String nullStringStorage
public static String nullStringOutput
public static Random randGen
public static final String NSTR
Constructor Detail
public Utilities()
Method Detail
public static void clearMapRedWork(org.apache.hadoop.conf.Configuration job)
public static mapredWork getMapRedWork(org.apache.hadoop.conf.Configuration job)
public static List<String> getFieldSchemaString(List<FieldSchema> fl)
public static void setMapRedWork(org.apache.hadoop.conf.Configuration job, mapredWork w)
public static String getJobName(org.apache.hadoop.conf.Configuration job)
public static int sanitizedJobId(org.apache.hadoop.conf.Configuration job)
public static void serializeTasks(Task<? extends Serializable> t, OutputStream out)
public static void serializeMapRedWork(mapredWork w, OutputStream out)
public static mapredWork deserializeMapRedWork(InputStream in, org.apache.hadoop.conf.Configuration conf)
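A minimal round-trip sketch for the serialize/deserialize pair (not part of the original Javadoc). It assumes mapredWork lives in org.apache.hadoop.hive.ql.plan and that an empty plan built with a no-arg constructor is serializable:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.mapredWork;

public class PlanRoundTrip {
  public static void main(String[] args) {
    // Assumption: an empty plan created with the no-arg constructor is serializable.
    mapredWork plan = new mapredWork();

    // Serialize the plan object to an in-memory stream ...
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Utilities.serializeMapRedWork(plan, bos);

    // ... and read it back with the matching deserializer.
    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
    mapredWork copy = Utilities.deserializeMapRedWork(bis, new Configuration());
    System.out.println("Round-tripped plan: " + copy);
  }
}
```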
public static String getTaskId(org.apache.hadoop.conf.Configuration hconf)
public static HashMap makeMap(Object... olist)
public static Properties makeProperties(String... olist)
public static ArrayList makeList(Object... olist)
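A small sketch of the varargs helpers (not from the original Javadoc). It assumes makeMap and makeProperties consume their arguments as alternating key/value pairs; the raw collection types mirror the declared return types:

```java
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Properties;

import org.apache.hadoop.hive.ql.exec.Utilities;

public class VarargsHelpers {
  public static void main(String[] args) {
    // A list built directly from the argument values.
    ArrayList cols = Utilities.makeList("key", "value", "ds");

    // Assumption: arguments are consumed as alternating key/value pairs.
    HashMap serdeParams = Utilities.makeMap("field.delim", "\t", "serialization.format", "1");
    Properties tblProps = Utilities.makeProperties("columns", "key,value", "columns.types", "int,string");

    System.out.println(cols);
    System.out.println(serdeParams);
    System.out.println(tblProps);
  }
}
```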
public static tableDesc getTableDesc(Table tbl)
public static partitionDesc getPartitionDesc(Partition part) throws HiveException
Throws:
HiveException
public static void addMapWork(mapredWork mr, Table tbl, String alias, Operator<?> work)
public static String getOpTreeSkel(Operator<?> op)
public static boolean contentsEqual(InputStream is1, InputStream is2, boolean ignoreWhitespace) throws IOException
Throws:
IOException
public static String abbreviate(String str, int max)
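Per the summary, abbreviate() shortens a long query string, for example for use in a job name. A hedged sketch (not from the original Javadoc); the input query and the exact truncated output are illustrative:

```java
import org.apache.hadoop.hive.ql.exec.Utilities;

public class AbbreviateDemo {
  public static void main(String[] args) {
    String query = "FROM src INSERT OVERWRITE TABLE dest SELECT src.key, src.value";

    // Truncated to roughly `max` characters, with a trailing marker.
    System.out.println(Utilities.abbreviate(query, 20));

    // A generous limit should leave the string essentially untouched.
    System.out.println(Utilities.abbreviate(query, 200));
  }
}
```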
public static Utilities.streamStatus readColumn(DataInput in, OutputStream out) throws IOException
Throws:
IOException
public static OutputStream createCompressedStream(org.apache.hadoop.mapred.JobConf jc, OutputStream out) throws IOException
Parameters:
jc - Job Configuration
out - Output Stream to be converted into compressed output stream
Throws:
IOException
public static OutputStream createCompressedStream(org.apache.hadoop.mapred.JobConf jc, OutputStream out, boolean isCompressed) throws IOException
Parameters:
jc - Job Configuration
out - Output Stream to be converted into compressed output stream
isCompressed - whether the output stream needs to be compressed or not
Throws:
IOException
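A minimal sketch (not from the original Javadoc) of wrapping a plain file stream with the two overloads; the local path is illustrative:

```java
import java.io.OutputStream;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;

public class CompressedStreamDemo {
  public static void main(String[] args) throws Exception {
    JobConf jc = new JobConf();
    FileSystem fs = FileSystem.getLocal(jc);
    OutputStream raw = fs.create(new Path("/tmp/hive-demo/out.txt"));

    // Let the Job Configuration decide whether and how to compress ...
    OutputStream out = Utilities.createCompressedStream(jc, raw);
    // ... or force the decision with the explicit flag instead:
    //   OutputStream out = Utilities.createCompressedStream(jc, raw, true);

    out.write("hello\n".getBytes("UTF-8"));
    out.close();
  }
}
```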
public static String getFileExtension(org.apache.hadoop.mapred.JobConf jc, boolean isCompressed)
Parameters:
jc - Job Configuration
isCompressed - Whether the output file is compressed or not
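A short sketch (not from the original Javadoc) showing how the extension pairs with the writer methods; the actual string returned depends on the configured codec and may be empty when compression is off:

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;

public class FileExtensionDemo {
  public static void main(String[] args) {
    JobConf jc = new JobConf();
    boolean isCompressed = true;

    // Extension matching the configured output codec and compression flag.
    String extension = Utilities.getFileExtension(jc, isCompressed);
    Path target = new Path("/tmp/hive-demo", "part-00000" + extension);
    System.out.println("Writing to " + target);
  }
}
```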
public static org.apache.hadoop.io.SequenceFile.Writer createSequenceWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, Class<?> keyClass, Class<?> valClass) throws IOException
Parameters:
jc - Job configuration
fs - File System to create file in
file - Path to be created
keyClass - Java Class for key
valClass - Java Class for value
Throws:
IOException
public static org.apache.hadoop.io.SequenceFile.Writer createSequenceWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, Class<?> keyClass, Class<?> valClass, boolean isCompressed) throws IOException
Parameters:
jc - Job configuration
fs - File System to create file in
file - Path to be created
keyClass - Java Class for key
valClass - Java Class for value
Throws:
IOException
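A minimal sketch (not from the original Javadoc) of the compression-flag overload, writing a single BytesWritable pair to a local SequenceFile; the path and key/value classes are illustrative:

```java
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.JobConf;

public class SequenceWriterDemo {
  public static void main(String[] args) throws Exception {
    JobConf jc = new JobConf();
    FileSystem fs = FileSystem.getLocal(jc);
    Path file = new Path("/tmp/hive-demo/data.seq");

    // The explicit boolean overrides whatever the Job Configuration says about compression.
    SequenceFile.Writer writer =
        Utilities.createSequenceWriter(jc, fs, file, BytesWritable.class, BytesWritable.class, true);
    writer.append(new BytesWritable("k".getBytes("UTF-8")),
                  new BytesWritable("v".getBytes("UTF-8")));
    writer.close();
  }
}
```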
public static RCFile.Writer createRCFileWriter(org.apache.hadoop.mapred.JobConf jc, org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path file, boolean isCompressed) throws IOException
Parameters:
jc - Job configuration
fs - File System to create file in
file - Path to be created
Throws:
IOException
public static String realFile(String newFile, org.apache.hadoop.conf.Configuration conf) throws IOException
Throws:
IOException
public static List<String> mergeUniqElems(List<String> src, List<String> dest)
public static org.apache.hadoop.fs.Path toTempPath(org.apache.hadoop.fs.Path orig)
public static org.apache.hadoop.fs.Path toTempPath(String orig)
public static boolean isTempPath(org.apache.hadoop.fs.FileStatus file)
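A small sketch (not from the original Javadoc) pairing toTempPath() with isTempPath(); the temporary-name scheme is whatever toTempPath() produces, and the directory is illustrative:

```java
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;

public class TempPathDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new JobConf());

    Path finalPath = new Path("/tmp/hive-demo/output");
    Path tmpPath = Utilities.toTempPath(finalPath);  // temporary sibling of the final path
    fs.mkdirs(tmpPath);

    FileStatus status = fs.getFileStatus(tmpPath);
    System.out.println(tmpPath + " temporary? " + Utilities.isTempPath(status));
  }
}
```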
public static void rename(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path src, org.apache.hadoop.fs.Path dst) throws IOException, HiveException
Parameters:
fs - the FileSystem where src and dst are on
src - the src directory
dst - the target directory
Throws:
IOException
HiveException
public static void renameOrMoveFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path src, org.apache.hadoop.fs.Path dst) throws IOException, HiveException
Parameters:
fs - the FileSystem where src and dst are on
src - the src directory
dst - the target directory
Throws:
IOException
HiveException
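A minimal sketch (not from the original Javadoc) of promoting a finished temporary directory to its final location; per the descriptions above, the files in src are moved into dst when dst already exists. Paths are illustrative:

```java
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;

public class RenameDemo {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new JobConf());

    Path src = new Path("/tmp/hive-demo/_tmp.output");  // illustrative temporary directory
    Path dst = new Path("/tmp/hive-demo/output");
    fs.mkdirs(src);

    // Plain rename when dst is absent; moves src's files into dst otherwise.
    Utilities.renameOrMoveFiles(fs, src, dst);
  }
}
```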
public static String getTaskIdFromFilename(String filename)
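A hedged sketch (not from the original Javadoc); the file names below follow the usual MapReduce output naming and are purely illustrative:

```java
import org.apache.hadoop.hive.ql.exec.Utilities;

public class TaskIdDemo {
  public static void main(String[] args) {
    // Bare output file name.
    System.out.println(Utilities.getTaskIdFromFilename("000001_0"));
    // Full path with a compression extension.
    System.out.println(Utilities.getTaskIdFromFilename("/warehouse/t/000042_0.deflate"));
  }
}
```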
public static void removeTempOrDuplicateFiles(org.apache.hadoop.fs.FileSystem fs, org.apache.hadoop.fs.Path path) throws IOException
Throws:
IOException
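A one-call sketch (not from the original Javadoc) of cleaning a job output directory before declaring it final; the path is illustrative:

```java
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.mapred.JobConf;

public class CleanupDemo {
  public static void main(String[] args) throws Exception {
    JobConf jc = new JobConf();
    FileSystem fs = FileSystem.getLocal(jc);

    // Drops leftover temporary files and double-committed duplicates under the directory.
    Utilities.removeTempOrDuplicateFiles(fs, new Path("/tmp/hive-demo/output"));
  }
}
```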
public static String getNameMessage(Exception e)
public static ClassLoader addToClassPath(ClassLoader cloader, String[] newPaths) throws Exception
Parameters:
newPaths - Array of classpath elements
Throws:
Exception
public static void removeFromClassPath(String[] pathsToRemove) throws Exception
Parameters:
pathsToRemove - Array of classpath elements
Throws:
Exception
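A minimal sketch (not from the original Javadoc) of adding and later removing auxiliary jars; the jar path is illustrative, and it is assumed the current context classloader is one that addToClassPath can extend:

```java
import org.apache.hadoop.hive.ql.exec.Utilities;

public class ClassPathDemo {
  public static void main(String[] args) throws Exception {
    String[] extraJars = new String[] { "/tmp/hive-demo/my-udfs.jar" };  // illustrative jar

    // Extend the current context classloader and install the result.
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    ClassLoader augmented = Utilities.addToClassPath(loader, extraJars);
    Thread.currentThread().setContextClassLoader(augmented);

    // ... resolve UDF or SerDe classes through the augmented loader here ...

    // Take the same elements back off the classpath when done.
    Utilities.removeFromClassPath(extraJars);
  }
}
```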
public static String formatBinaryString(byte[] array, int start, int length)
public static List<String> getColumnNamesFromSortCols(List<Order> sortCols)
public static List<String> getColumnNamesFromFieldSchema(List<FieldSchema> partCols)
public static List<String> getColumnNames(Properties props)
public static void validateColumnNames(List<String> colNames, List<String> checkCols) throws SemanticException
Throws:
SemanticException
public static int getDefaultNotificationInterval(org.apache.hadoop.conf.Configuration hconf)
Parameters:
hconf -
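A short sketch (not from the original Javadoc); it assumes the returned interval is in milliseconds:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.Utilities;

public class NotificationIntervalDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Interval between progress updates sent to the tracker (assumed to be in milliseconds).
    int interval = Utilities.getDefaultNotificationInterval(conf);
    System.out.println("Send progress updates every " + interval + " ms");
  }
}
```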