public class RowCalcReducer extends org.apache.hadoop.mapreduce.Reducer<org.apache.hadoop.io.IntWritable,BytesArrayWritable,org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text>
For each row (identified by key = hash(selector)), iterates over each dataElement and calculates the column values, emitting <colNum, colVal> pairs.
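
For orientation, below is a minimal driver sketch showing how a reducer with this signature could be wired into a Hadoop job; the intermediate and final key/value classes follow directly from the generic parameters in the class declaration above. The mapper, the input/output paths, and the imports for RowCalcReducer and BytesArrayWritable are assumptions and not part of this API.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Sketch only: wiring inferred from RowCalcReducer's generic signature.
// RowCalcReducer and BytesArrayWritable must be imported from the project's
// packages (their package names are not shown in this documentation).
public class RowCalcJobSketch
{
  public static void main(String[] args) throws Exception
  {
    Job job = Job.getInstance(new Configuration(), "row calculation");
    job.setJarByClass(RowCalcJobSketch.class);

    // The mapper is project-specific; it must emit
    // <IntWritable key = hash(selector), BytesArrayWritable dataElement> pairs.
    // job.setMapperClass(...);

    job.setReducerClass(RowCalcReducer.class);

    // Intermediate (map output) types match the reducer's input types.
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(BytesArrayWritable.class);

    // The reducer emits <colNum, colVal> as <LongWritable, Text>.
    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(Text.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
```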
| Constructor and Description |
| --- |
| RowCalcReducer() |
| Modifier and Type | Method and Description |
| --- | --- |
| void | cleanup(org.apache.hadoop.mapreduce.Reducer.Context ctx) |
| void | reduce(org.apache.hadoop.io.IntWritable rowIndex, java.lang.Iterable<BytesArrayWritable> dataElementPartitions, org.apache.hadoop.mapreduce.Reducer.Context ctx) |
| void | setup(org.apache.hadoop.mapreduce.Reducer.Context ctx) |
public void setup(org.apache.hadoop.mapreduce.Reducer.Context ctx) throws java.io.IOException, java.lang.InterruptedException

Overrides:
setup in class org.apache.hadoop.mapreduce.Reducer<org.apache.hadoop.io.IntWritable,BytesArrayWritable,org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text>

Throws:
java.io.IOException
java.lang.InterruptedException
public void reduce(org.apache.hadoop.io.IntWritable rowIndex, java.lang.Iterable<BytesArrayWritable> dataElementPartitions, org.apache.hadoop.mapreduce.Reducer.Context ctx) throws java.io.IOException, java.lang.InterruptedException

Overrides:
reduce in class org.apache.hadoop.mapreduce.Reducer<org.apache.hadoop.io.IntWritable,BytesArrayWritable,org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text>

Throws:
java.io.IOException
java.lang.InterruptedException
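
A minimal sketch of the iterate-and-emit pattern described in the class overview, under the assumption that the per-column results of a single row can be accumulated in memory: each BytesArrayWritable in dataElementPartitions is folded into a column map, and one <colNum, colVal> pair is written per column. The accumulateColumns helper is a hypothetical placeholder; the actual column arithmetic is not specified in this documentation.

```java
import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Sketch only: illustrates the <colNum, colVal> emit pattern.
// BytesArrayWritable must be imported from the project's package;
// accumulateColumns() is a hypothetical stand-in for the real column calculation.
public class RowCalcReduceSketch
    extends Reducer<IntWritable, BytesArrayWritable, LongWritable, Text>
{
  @Override
  public void reduce(IntWritable rowIndex, Iterable<BytesArrayWritable> dataElementPartitions, Context ctx)
      throws IOException, InterruptedException
  {
    // colNum -> accumulated column value for the row keyed by hash(selector).
    TreeMap<Long,Text> columns = new TreeMap<>();

    for (BytesArrayWritable dataElement : dataElementPartitions)
    {
      accumulateColumns(columns, rowIndex.get(), dataElement);
    }

    // Emit one <colNum, colVal> pair per column touched by this row.
    for (Map.Entry<Long,Text> col : columns.entrySet())
    {
      ctx.write(new LongWritable(col.getKey()), col.getValue());
    }
  }

  // Hypothetical helper: folds one data element's partitions into the column map.
  private void accumulateColumns(TreeMap<Long,Text> columns, int rowIndex, BytesArrayWritable dataElement)
  {
    // project-specific arithmetic goes here
  }
}
```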
public void cleanup(org.apache.hadoop.mapreduce.Reducer.Context ctx) throws java.io.IOException, java.lang.InterruptedException

Overrides:
cleanup in class org.apache.hadoop.mapreduce.Reducer<org.apache.hadoop.io.IntWritable,BytesArrayWritable,org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text>

Throws:
java.io.IOException
java.lang.InterruptedException
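
setup() and cleanup() bracket all reduce() calls of a single task, so they are the natural place to open and release per-task state. A minimal lifecycle sketch follows, assuming the task holds a MultipleOutputs writer; whether RowCalcReducer actually keeps such a resource is not stated in this documentation.

```java
import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;

// Sketch only: the usual setup()/cleanup() pairing around a per-task resource.
// BytesArrayWritable must be imported from the project's package; the
// MultipleOutputs field is an assumption, not a documented member of RowCalcReducer.
public class RowCalcLifecycleSketch
    extends Reducer<IntWritable, BytesArrayWritable, LongWritable, Text>
{
  private MultipleOutputs<LongWritable,Text> mos;

  @Override
  public void setup(Context ctx) throws IOException, InterruptedException
  {
    // Runs once per reduce task, before the first reduce() call.
    mos = new MultipleOutputs<>(ctx);
  }

  @Override
  public void cleanup(Context ctx) throws IOException, InterruptedException
  {
    // Runs once per reduce task, after the last reduce() call:
    // flush and close anything opened in setup().
    mos.close();
  }
}
```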