/**
 * Copyright 2009 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Import data written by {@link Export}.
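 * <p>
 * A minimal sketch of a command-line invocation, assuming the tool is run
 * through the HBase jar's driver (the jar name and driver registration are
 * build-specific; the arguments match the usage string printed by this
 * class):
 * <pre>
 *   $ bin/hadoop jar hbase-VERSION.jar import &lt;tablename&gt; &lt;inputdir&gt;
 * </pre>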
 */
public class Import {
  final static String NAME = "import";

  /**
   * Mapper that reads the exported {@link Result}s back in and rewrites
   * them as {@link Put}s for the target table.
   */
  static class Importer
  extends TableMapper<ImmutableBytesWritable, Put> {
    /**
     * @param row  The current table row key.
     * @param value  The columns.
     * @param context  The current context.
     * @throws IOException When something is broken with the data.
     * @see org.apache.hadoop.mapreduce.Mapper#map(Object, Object,
     *   org.apache.hadoop.mapreduce.Mapper.Context)
     */
    @Override
    public void map(ImmutableBytesWritable row, Result value,
      Context context)
    throws IOException {
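      // Rewrite the exported Result as a Put and emit it; because the job
      // runs with zero reduce tasks, the Put goes straight to
      // TableOutputFormat.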
      try {
        context.write(row, resultToPut(row, value));
      } catch (InterruptedException e) {
        // Restore the interrupt status and fail the task instead of
        // silently swallowing the interruption.
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted while writing Put", e);
      }
    }

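    /**
     * Rebuilds a {@link Put} from an exported {@link Result}, copying each
     * {@link KeyValue} so the original cells are restored unchanged.
     */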
    private static Put resultToPut(ImmutableBytesWritable key, Result result)
    throws IOException {
      Put put = new Put(key.get());
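      // Put.add(KeyValue) keeps the cell's family, qualifier, timestamp and
      // value as-is, so existing versions are reinstated rather than
      // re-stamped at write time.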
      for (KeyValue kv : result.raw()) {
        put.add(kv);
      }
      return put;
    }
  }

  /**
   * Sets up the actual job.
   *
   * @param conf  The current configuration.
   * @param args  The command line parameters.
   * @return The newly created job.
   * @throws IOException When setting up the job fails.
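   *
   * <p>A minimal sketch of driving the job programmatically (the table name
   * {@code "mytable"} and path {@code /export/mytable} are hypothetical):
   * <pre>
   *   Configuration conf = HBaseConfiguration.create();
   *   Job job = Import.createSubmittableJob(conf,
   *     new String[] { "mytable", "/export/mytable" });
   *   job.waitForCompletion(true);
   * </pre>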
   */
  public static Job createSubmittableJob(Configuration conf, String[] args)
  throws IOException {
    String tableName = args[0];
    Path inputDir = new Path(args[1]);
    Job job = new Job(conf, NAME + "_" + tableName);
    job.setJarByClass(Importer.class);
    FileInputFormat.setInputPaths(job, inputDir);
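    // Export writes SequenceFiles of (ImmutableBytesWritable, Result), so
    // read them back with the matching input format.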
    job.setInputFormatClass(SequenceFileInputFormat.class);
    job.setMapperClass(Importer.class);
    // No reducers.  Just write straight to table.  Call initTableReducerJob
    // because it sets up the TableOutputFormat.
    TableMapReduceUtil.initTableReducerJob(tableName, null, job);
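    // Map-only job: each mapper's output Puts are handed directly to
    // TableOutputFormat and written to the target table.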
    job.setNumReduceTasks(0);
    return job;
  }

  /**
   * @param errorMsg Error message.  Can be null.
   */
  private static void usage(final String errorMsg) {
    if (errorMsg != null && errorMsg.length() > 0) {
      System.err.println("ERROR: " + errorMsg);
    }
    System.err.println("Usage: Import <tablename> <inputdir>");
  }

  /**
   * Main entry point.
   *
   * @param args  The command line parameters.
   * @throws Exception When running the job fails.
   */
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
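    // Let GenericOptionsParser strip generic Hadoop options (-D, -fs, -jt,
    // etc.) so only the tool-specific arguments remain.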
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
      usage("Wrong number of arguments: " + otherArgs.length);
      System.exit(-1);
    }
    Job job = createSubmittableJob(conf, otherArgs);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}