Uses of Class org.apache.hadoop.mapred.JobConf
Packages that use JobConf

Package | Description
---|---
org.apache.hadoop.contrib.utils.join |
org.apache.hadoop.examples | Hadoop example code.
org.apache.hadoop.examples.dancing | This package is a distributed implementation of Knuth's dancing links algorithm that can run under Hadoop.
org.apache.hadoop.hbase.mapred |
org.apache.hadoop.io | Generic i/o code for use when reading and writing data to the network, to databases, and to files.
org.apache.hadoop.mapred | A system for scalable, fault-tolerant, distributed computation over large data collections.
org.apache.hadoop.mapred.jobcontrol | Utilities for managing dependent jobs.
org.apache.hadoop.mapred.lib | Library of generally useful mappers, reducers, and partitioners.
org.apache.hadoop.mapred.lib.aggregate | Classes for performing various counting and aggregations.
org.apache.hadoop.mapred.pipes | Hadoop Pipes allows C++ code to use Hadoop DFS and map/reduce.
org.apache.hadoop.streaming |
org.apache.hadoop.tools |
org.apache.hadoop.util | Common utilities.
Uses of JobConf in org.apache.hadoop.contrib.utils.join

Fields in org.apache.hadoop.contrib.utils.join declared as JobConf

Modifier and Type | Field | Description
---|---|---
protected JobConf | DataJoinReducerBase.job |
protected JobConf | DataJoinMapperBase.job |

Methods in org.apache.hadoop.contrib.utils.join that return JobConf

Modifier and Type | Method | Description
---|---|---
static JobConf | DataJoinJob.createDataJoinJob(String[] args) |

Methods in org.apache.hadoop.contrib.utils.join with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | DataJoinReducerBase.configure(JobConf job) |
void | DataJoinMapperBase.configure(JobConf job) |
void | JobBase.configure(JobConf job) | Initializes a new instance from a JobConf.
static boolean | DataJoinJob.runJob(JobConf job) | Submit/run a map/reduce job.
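A minimal driver sketch for this package: DataJoinJob.createDataJoinJob builds a JobConf from command-line style arguments, and DataJoinJob.runJob submits it. The argument layout (input directories, output directory, mapper/reducer classes, and so on) is an assumption here; consult DataJoinJob's usage message for the exact order.

```java
import org.apache.hadoop.contrib.utils.join.DataJoinJob;
import org.apache.hadoop.mapred.JobConf;

public class JoinDriver {
  public static void main(String[] args) throws Exception {
    // Parse the command-line arguments into a configured data-join job.
    JobConf job = DataJoinJob.createDataJoinJob(args);
    // Submit the job and block until it completes.
    DataJoinJob.runJob(job);
  }
}
```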
Uses of JobConf in org.apache.hadoop.examples

Methods in org.apache.hadoop.examples with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | PiEstimator.PiMapper.configure(JobConf job) | Mapper configuration.
void | PiEstimator.PiReducer.configure(JobConf job) | Reducer configuration.
Uses of JobConf in org.apache.hadoop.examples.dancing

Methods in org.apache.hadoop.examples.dancing with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | DistributedPentomino.PentMap.configure(JobConf conf) |
Uses of JobConf in org.apache.hadoop.hbase.mapred

Methods in org.apache.hadoop.hbase.mapred with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | TableOutputFormat.checkOutputSpecs(FileSystem ignored, JobConf job) |
void | TableMap.configure(JobConf job) |
void | TableInputFormat.configure(JobConf job) |
void | GroupingTableMap.configure(JobConf job) |
RecordReader | TableInputFormat.getRecordReader(InputSplit split, JobConf job, Reporter reporter) |
RecordWriter | TableOutputFormat.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) |
InputSplit[] | TableInputFormat.getSplits(JobConf job, int numSplits) | A split will be created for each HRegion of the input table.
static void | TableReduce.initJob(String table, Class<? extends TableReduce> reducer, JobConf job) | Use this before submitting a TableReduce job.
static void | TableMap.initJob(String table, String columns, Class<? extends TableMap> mapper, JobConf job) | Use this before submitting a TableMap job.
static void | GroupingTableMap.initJob(String table, String columns, String groupColumns, Class<? extends TableMap> mapper, JobConf job) | Use this before submitting a TableMap job.
void | TableInputFormat.validateInput(JobConf job) |
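As a sketch of how the initJob helpers are used: the call below wires a scan of an HBase table into a JobConf before submission. The table name and space-separated column specification are illustrative assumptions, and GroupingTableMap is reused as the mapper class only because it is concrete; in practice you would pass your own TableMap subclass.

```java
import org.apache.hadoop.hbase.mapred.GroupingTableMap;
import org.apache.hadoop.mapred.JobConf;

public class HBaseScanSetup {
  public static void main(String[] args) {
    JobConf job = new JobConf(HBaseScanSetup.class);
    job.setJobName("hbase-grouped-scan");
    // Scan table "mytable", emitting only rows that contain both grouping
    // columns. Table and column names are illustrative, not real defaults.
    GroupingTableMap.initJob("mytable", "info:a info:b", "info:a info:b",
        GroupingTableMap.class, job);
  }
}
```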
Uses of JobConf in org.apache.hadoop.io

Methods in org.apache.hadoop.io with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
static Writable | WritableUtils.clone(Writable orig, JobConf conf) | Make a copy of a writable object using serialization to a buffer.
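A small sketch of clone in use: it round-trips the Writable through an in-memory buffer, so the result is a deep copy independent of the original.

```java
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.JobConf;

public class CloneDemo {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf();
    Text original = new Text("hello");
    // clone() serializes to a buffer and deserializes a fresh instance,
    // so mutating the copy leaves the original untouched.
    Text copy = (Text) WritableUtils.clone(original, conf);
    copy.set("changed");
    System.out.println(original + " / " + copy); // prints: hello / changed
  }
}
```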
Uses of JobConf in org.apache.hadoop.mapred

Methods in org.apache.hadoop.mapred with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | OutputFormatBase.checkOutputSpecs(FileSystem ignored, JobConf job) |
void | OutputFormat.checkOutputSpecs(FileSystem ignored, JobConf job) | Check whether the output specification for a job is appropriate.
void | MapRunner.configure(JobConf job) |
void | MapReduceBase.configure(JobConf job) | Default implementation that does nothing.
void | TextInputFormat.configure(JobConf conf) |
void | JobConfigurable.configure(JobConf job) | Initializes a new instance from a JobConf.
static boolean | OutputFormatBase.getCompressOutput(JobConf conf) | Is the reduce output compressed?
static Class | OutputFormatBase.getOutputCompressorClass(JobConf conf, Class defaultValue) | Get the codec for compressing the reduce outputs.
RecordReader | SequenceFileInputFilter.getRecordReader(InputSplit split, JobConf job, Reporter reporter) | Create a record reader for the given split.
RecordReader | SequenceFileInputFormat.getRecordReader(InputSplit split, JobConf job, Reporter reporter) |
RecordReader | SequenceFileAsTextInputFormat.getRecordReader(InputSplit split, JobConf job, Reporter reporter) |
abstract RecordReader | MultiFileInputFormat.getRecordReader(InputSplit split, JobConf job, Reporter reporter) |
RecordReader | TextInputFormat.getRecordReader(InputSplit genericSplit, JobConf job, Reporter reporter) |
RecordReader | KeyValueTextInputFormat.getRecordReader(InputSplit genericSplit, JobConf job, Reporter reporter) |
abstract RecordReader | FileInputFormat.getRecordReader(InputSplit split, JobConf job, Reporter reporter) |
RecordReader | InputFormat.getRecordReader(InputSplit split, JobConf job, Reporter reporter) | Construct a RecordReader for a FileSplit.
RecordWriter | TextOutputFormat.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) |
RecordWriter | SequenceFileOutputFormat.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) |
abstract RecordWriter | OutputFormatBase.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) |
RecordWriter | MapFileOutputFormat.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) |
RecordWriter | OutputFormat.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) | Construct a RecordWriter with Progressable.
InputSplit[] | MultiFileInputFormat.getSplits(JobConf job, int numSplits) |
InputSplit[] | FileInputFormat.getSplits(JobConf job, int numSplits) | Splits files returned by FileInputFormat.listPaths(JobConf) when they're too big.
InputSplit[] | InputFormat.getSplits(JobConf job, int numSplits) | Splits a set of input files.
static long | TaskLog.getTaskLogLength(JobConf conf) | Get the desired maximum length of a task's logs.
static JobClient.TaskStatusFilter | JobClient.getTaskOutputFilter(JobConf job) | Get the task output filter out of the JobConf.
void | JobClient.init(JobConf conf) |
protected Path[] | SequenceFileInputFormat.listPaths(JobConf job) |
protected Path[] | FileInputFormat.listPaths(JobConf job) | List input directories.
static void | JobEndNotifier.localRunnerNotification(JobConf conf, JobStatus status) |
static void | JobEndNotifier.registerNotification(JobConf jobConf, JobStatus status) |
static RunningJob | JobClient.runJob(JobConf job) | Utility that submits a job, then polls for progress until the job is complete.
static void | OutputFormatBase.setCompressOutput(JobConf conf, boolean val) | Set whether the output of the reduce is compressed.
static void | OutputFormatBase.setOutputCompressorClass(JobConf conf, Class codecClass) | Set the given class as the output compression codec.
static void | JobClient.setTaskOutputFilter(JobConf job, JobClient.TaskStatusFilter newValue) | Modify the JobConf to set the task output filter.
static void | JobTracker.startTracker(JobConf conf) | Start the JobTracker with the given configuration.
RunningJob | JobClient.submitJob(JobConf job) | Submit a job to the MR system.
void | FileInputFormat.validateInput(JobConf job) |
void | InputFormat.validateInput(JobConf job) | Are the input directories valid? This method is used to test the input directories when a job is submitted, so that the framework can fail early with a useful error message when an input directory does not exist.

Constructors in org.apache.hadoop.mapred with parameters of type JobConf

Constructor | Description
---|---
FileSplit(Path file, long start, long length, JobConf conf) | Constructs a split.
JobClient(JobConf conf) |
MultiFileSplit(JobConf job, Path[] files, long[] lengths) |
PhasedFileSystem(FileSystem fs, JobConf conf) | Deprecated. This constructor is used to wrap a FileSystem object into a PhasedFileSystem.
TaskTracker(JobConf conf) | Start with the local machine name and the default JobTracker.
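To show how these pieces fit together, a minimal driver sketch: it configures an identity map/reduce pass over text input and submits it with JobClient.runJob. IdentityMapper and IdentityReducer come from org.apache.hadoop.mapred.lib; the JobConf.setInputPath/setOutputPath calls are assumed from this era's API (later releases moved path handling to FileInputFormat/FileOutputFormat), so treat this as a sketch rather than a definitive recipe.

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormatBase;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

public class PassThroughDriver {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(PassThroughDriver.class);
    conf.setJobName("pass-through");

    // TextInputFormat produces <LongWritable offset, Text line> records.
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(IdentityMapper.class);
    conf.setReducerClass(IdentityReducer.class);

    // Compress the reduce output via the static helper listed above.
    OutputFormatBase.setCompressOutput(conf, true);

    // Input/output paths; setInputPath/setOutputPath are assumed from the
    // contemporaneous JobConf API.
    conf.setInputPath(new Path(args[0]));
    conf.setOutputPath(new Path(args[1]));

    // Submit the job and poll for progress until it completes.
    JobClient.runJob(conf);
  }
}
```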
Uses of JobConf in org.apache.hadoop.mapred.jobcontrol

Methods in org.apache.hadoop.mapred.jobcontrol that return JobConf

Modifier and Type | Method | Description
---|---|---
JobConf | Job.getJobConf() |

Methods in org.apache.hadoop.mapred.jobcontrol with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | Job.setJobConf(JobConf jobConf) | Set the mapred job conf for this job.

Constructors in org.apache.hadoop.mapred.jobcontrol with parameters of type JobConf

Constructor | Description
---|---
Job(JobConf jobConf, ArrayList dependingJobs) | Construct a job.
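A hedged sketch of wiring two dependent jobs with this constructor. JobControl lives in the same package but does not appear in the tables above, since none of its methods take a JobConf; its constructor and addJob/run methods are assumed from the package's contemporaneous API.

```java
import java.util.ArrayList;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;

public class DependentJobs {
  public static void main(String[] args) throws Exception {
    JobConf first = new JobConf();   // configure the upstream job as usual
    JobConf second = new JobConf();  // consumes the first job's output

    // The ArrayList of depending jobs is raw, matching the era's signature.
    Job jobA = new Job(first, new ArrayList());
    ArrayList deps = new ArrayList();
    deps.add(jobA);
    Job jobB = new Job(second, deps); // jobB waits until jobA succeeds

    JobControl control = new JobControl("chain");
    control.addJob(jobA);
    control.addJob(jobB);
    control.run(); // runs in this thread until all jobs finish or fail
  }
}
```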
Uses of JobConf in org.apache.hadoop.mapred.lib

Methods in org.apache.hadoop.mapred.lib with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | NullOutputFormat.checkOutputSpecs(FileSystem ignored, JobConf job) |
void | RegexMapper.configure(JobConf job) |
void | MultithreadedMapRunner.configure(JobConf job) |
void | KeyFieldBasedPartitioner.configure(JobConf job) |
void | HashPartitioner.configure(JobConf job) |
void | FieldSelectionMapReduce.configure(JobConf job) |
RecordWriter | NullOutputFormat.getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) |
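These configure(JobConf) implementations read their settings from job properties. As a hedged sketch, the snippet below selects KeyFieldBasedPartitioner and sets the number of leading key fields to partition on; the property name num.key.fields.for.partition is an assumption taken from contemporaneous streaming documentation.

```java
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;

public class PartitionerSetup {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Partition on the first two key fields; the framework calls
    // KeyFieldBasedPartitioner.configure(conf) at task start, which is
    // where this property is read (property name is an assumption).
    conf.setPartitionerClass(KeyFieldBasedPartitioner.class);
    conf.set("num.key.fields.for.partition", "2");
  }
}
```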
Uses of JobConf in org.apache.hadoop.mapred.lib.aggregate

Methods in org.apache.hadoop.mapred.lib.aggregate that return JobConf

Modifier and Type | Method | Description
---|---|---
static JobConf | ValueAggregatorJob.createValueAggregatorJob(String[] args) | Create an Aggregate based map/reduce job.

Methods in org.apache.hadoop.mapred.lib.aggregate with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | ValueAggregatorJobBase.configure(JobConf job) |
void | ValueAggregatorCombiner.configure(JobConf job) | Combiner does not need to configure.
void | ValueAggregatorBaseDescriptor.configure(JobConf job) | Get the input file name.
void | ValueAggregatorDescriptor.configure(JobConf job) | Configure the object.
void | UserDefinedValueAggregatorDescriptor.configure(JobConf job) | Do nothing.

Constructors in org.apache.hadoop.mapred.lib.aggregate with parameters of type JobConf

Constructor | Description
---|---
UserDefinedValueAggregatorDescriptor(String className, JobConf job) |
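A minimal driver sketch: createValueAggregatorJob turns command-line arguments (input and output directories, number of reducers, and an aggregator descriptor specification; the exact order is an assumption) into a ready-to-submit JobConf.

```java
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

public class AggregateDriver {
  public static void main(String[] args) throws Exception {
    // Build the aggregate job from the command line, then submit it and
    // poll until completion.
    JobConf job = ValueAggregatorJob.createValueAggregatorJob(args);
    JobClient.runJob(job);
  }
}
```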
Uses of JobConf in org.apache.hadoop.mapred.pipes

Methods in org.apache.hadoop.mapred.pipes with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
static String | Submitter.getExecutable(JobConf conf) | Get the URI of the application's executable.
static boolean | Submitter.getIsJavaMapper(JobConf conf) | Check whether the job is using a Java Mapper.
static boolean | Submitter.getIsJavaRecordReader(JobConf conf) | Check whether the job is using a Java RecordReader.
static boolean | Submitter.getIsJavaRecordWriter(JobConf conf) | Will the reduce use a Java RecordWriter?
static boolean | Submitter.getIsJavaReducer(JobConf conf) | Check whether the job is using a Java Reducer.
static boolean | Submitter.getKeepCommandFile(JobConf conf) | Does the user want to keep the command file for debugging? If this is true, pipes will write a copy of the command data to a file named "downlink.data" in the task directory, which may be used to run the C++ program under the debugger.
static void | Submitter.setExecutable(JobConf conf, String executable) | Set the URI for the application's executable.
static void | Submitter.setIsJavaMapper(JobConf conf, boolean value) | Set whether the Mapper is written in Java.
static void | Submitter.setIsJavaRecordReader(JobConf conf, boolean value) | Set whether the job is using a Java RecordReader.
static void | Submitter.setIsJavaRecordWriter(JobConf conf, boolean value) | Set whether the job will use a Java RecordWriter.
static void | Submitter.setIsJavaReducer(JobConf conf, boolean value) | Set whether the Reducer is written in Java.
static void | Submitter.setKeepCommandFile(JobConf conf, boolean keep) | Set whether to keep the command file for debugging.
static RunningJob | Submitter.submitJob(JobConf conf) | Submit a job to the map/reduce cluster.
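A hedged driver sketch using these setters: point the job at a C++ executable already available to the cluster, keep record reading and writing on the Java side, and submit. The HDFS path is illustrative only.

```java
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.pipes.Submitter;

public class PipesDriver {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(PipesDriver.class);
    // URI of the C++ binary; the path below is illustrative, not a default.
    Submitter.setExecutable(conf, "hdfs:///apps/wordcount-pipes");
    // Parse input and write output on the Java side; the C++ code only
    // implements the map and reduce logic.
    Submitter.setIsJavaRecordReader(conf, true);
    Submitter.setIsJavaRecordWriter(conf, true);
    // Submit to the cluster; returns a RunningJob handle for monitoring.
    Submitter.submitJob(conf);
  }
}
```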
Uses of JobConf in org.apache.hadoop.streaming

Fields in org.apache.hadoop.streaming declared as JobConf

Modifier and Type | Field | Description
---|---|---
protected JobConf | StreamJob.jobConf_ |

Methods in org.apache.hadoop.streaming with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | PipeMapRed.configure(JobConf job) |
void | PipeMapper.configure(JobConf job) |
static FileSplit | StreamUtil.getCurrentSplit(JobConf job) |
RecordReader | StreamInputFormat.getRecordReader(InputSplit genericSplit, JobConf job, Reporter reporter) |
static org.apache.hadoop.streaming.StreamUtil.TaskId | StreamUtil.getTaskInfo(JobConf job) |
static boolean | StreamUtil.isLocalJobTracker(JobConf job) |
void | StreamBaseRecordReader.validateInput(JobConf job) | This implementation always returns true.

Constructors in org.apache.hadoop.streaming with parameters of type JobConf

Constructor | Description
---|---
StreamBaseRecordReader(FSDataInputStream in, FileSplit split, Reporter reporter, JobConf job, FileSystem fs) |
StreamXmlRecordReader(FSDataInputStream in, FileSplit split, Reporter reporter, JobConf job, FileSystem fs) |
Uses of JobConf in org.apache.hadoop.tools

Methods in org.apache.hadoop.tools with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
void | Logalyzer.LogRegexMapper.configure(JobConf job) |
Uses of JobConf in org.apache.hadoop.util

Methods in org.apache.hadoop.util with parameters of type JobConf

Modifier and Type | Method | Description
---|---|---
abstract void | CopyFiles.CopyFilesMapper.cleanup(Configuration conf, JobConf jobConf, String srcPath, String destPath) | Interface to clean up distcp-specific resources.
void | CopyFiles.FSCopyFilesMapper.cleanup(Configuration conf, JobConf jobConf, String srcPath, String destPath) |
void | CopyFiles.HTTPCopyFilesMapper.cleanup(Configuration conf, JobConf jobConf, String srcPath, String destPath) |
void | CopyFiles.FSCopyFilesMapper.configure(JobConf job) | Mapper configuration.
void | CopyFiles.HTTPCopyFilesMapper.configure(JobConf job) |
boolean | NativeCodeLoader.getLoadNativeLibraries(JobConf jobConf) | Return whether native Hadoop libraries, if present, can be used for this job.
void | NativeCodeLoader.setLoadNativeLibraries(JobConf jobConf, boolean loadNativeLibraries) | Set whether native Hadoop libraries, if present, can be used for this job.
abstract void | CopyFiles.CopyFilesMapper.setup(Configuration conf, JobConf jobConf, String[] srcPaths, String destPath, Path logPath, boolean ignoreReadFailures) | Interface to initialize distcp-specific map tasks.
void | CopyFiles.FSCopyFilesMapper.setup(Configuration conf, JobConf jobConf, String[] srcPaths, String destPath, Path logPath, boolean ignoreReadFailures) | Initialize DFSCopyFileMapper-specific job configuration.
void | CopyFiles.HTTPCopyFilesMapper.setup(Configuration conf, JobConf jobConf, String[] srcPaths, String destPath, Path logPath, boolean ignoreReadFailures) | Initialize HTTPCopyFileMapper-specific job configuration.
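Note that getLoadNativeLibraries and setLoadNativeLibraries are listed without the static modifier, so a NativeCodeLoader instance is constructed first in the sketch below; the no-argument constructor is an assumption.

```java
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.NativeCodeLoader;

public class NativeLibToggle {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    NativeCodeLoader loader = new NativeCodeLoader();
    // Allow the job to use native Hadoop libraries when they are present.
    loader.setLoadNativeLibraries(conf, true);
    System.out.println("native libs enabled: "
        + loader.getLoadNativeLibraries(conf));
  }
}
```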