/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

/**
 * Input format that creates as many map tasks as configured in
 * <code>mapred.map.tasks</code>, each provided with a single row of
 * NullWritables. This can be useful when writing mappers that don't
 * have any real input (e.g. when the mapper is simply producing
 * random data as output).
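 * <p>
 * A minimal usage sketch, assuming a typical MapReduce job setup
 * (<code>RandomDataMapper</code> here is a hypothetical mapper):
 * <pre>
 *   Job job = new Job(conf);
 *   job.getConfiguration().setInt("mapred.map.tasks", 10);
 *   job.setInputFormatClass(NMapInputFormat.class);
 *   job.setMapperClass(RandomDataMapper.class); // hypothetical mapper
 * </pre>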
 */
public class NMapInputFormat extends InputFormat<NullWritable, NullWritable> {

  @Override
  public RecordReader<NullWritable, NullWritable> createRecordReader(
      InputSplit split,
      TaskAttemptContext tac) throws IOException, InterruptedException {
    return new SingleRecordReader<NullWritable, NullWritable>(
        NullWritable.get(), NullWritable.get());
  }

  @Override
  public List<InputSplit> getSplits(JobContext context) throws IOException,
      InterruptedException {
    // Create one empty split per requested map task.
    int count = context.getConfiguration().getInt("mapred.map.tasks", 1);
    List<InputSplit> splits = new ArrayList<InputSplit>(count);
    for (int i = 0; i < count; i++) {
      splits.add(new NullInputSplit());
    }
    return splits;
  }

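  /**
   * An input split with no length, no locations, and no serialized state.
   */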
  private static class NullInputSplit extends InputSplit implements Writable {
    @Override
    public long getLength() throws IOException, InterruptedException {
      return 0;
    }

    @Override
    public String[] getLocations() throws IOException, InterruptedException {
      return new String[] {};
    }

    @Override
    public void readFields(DataInput in) throws IOException {
    }

    @Override
    public void write(DataOutput out) throws IOException {
    }
  }

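  /**
   * Record reader that returns a single fixed key/value pair and then
   * reports that its input is exhausted.
   */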
  private static class SingleRecordReader<K, V> extends RecordReader<K, V> {

    private final K key;
    private final V value;
    private boolean providedKey = false;

    SingleRecordReader(K key, V value) {
      this.key = key;
      this.value = value;
    }

    @Override
    public void close() {
    }

    @Override
    public K getCurrentKey() {
      return key;
    }

    @Override
    public V getCurrentValue() {
      return value;
    }

    @Override
    public float getProgress() {
      return 0;
    }

    @Override
    public void initialize(InputSplit split, TaskAttemptContext tac) {
    }

    @Override
    public boolean nextKeyValue() {
      if (providedKey) {
        return false;
      }
      providedKey = true;
      return true;
    }
  }
}