/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapred;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.util.StringUtils;

/**
 * Convert HBase tabular data into a format that is consumable by Map/Reduce.
 */
@Deprecated
public class TableInputFormat extends TableInputFormatBase implements
    JobConfigurable {
  private final Log LOG = LogFactory.getLog(TableInputFormat.class);

  /**
   * space delimited list of columns
   */
  public static final String COLUMN_LIST = "hbase.mapred.tablecolumns";
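
  // Usage sketch (illustrative, not part of the original file): a driver
  // would point a job at a table and its columns roughly as follows, where
  // "mytable", "Driver", and the column names are placeholders.
  //
  //   JobConf job = new JobConf(HBaseConfiguration.create(), Driver.class);
  //   job.setInputFormat(TableInputFormat.class);
  //   FileInputFormat.setInputPaths(job, new Path("mytable"));
  //   job.set(TableInputFormat.COLUMN_LIST, "info:name info:email");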

  public void configure(JobConf job) {
    // The job's single input "path" names the table to read from.
    Path[] tableNames = FileInputFormat.getInputPaths(job);
    // COLUMN_LIST holds a space-delimited list of column names; split it
    // into the byte[][] form expected by TableInputFormatBase. Note that
    // validateInput below checks that the list is actually set.
    String colArg = job.get(COLUMN_LIST);
    String[] colNames = colArg.split(" ");
    byte[][] m_cols = new byte[colNames.length][];
    for (int i = 0; i < m_cols.length; i++) {
      m_cols[i] = Bytes.toBytes(colNames[i]);
    }
    setInputColumns(m_cols);
    try {
      setHTable(new HTable(HBaseConfiguration.create(job), tableNames[0].getName()));
    } catch (Exception e) {
      LOG.error(StringUtils.stringifyException(e));
    }
  }
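
  // Consumer-side sketch (hedged; ExampleMapper is illustrative and not part
  // of this file): with the deprecated mapred API of this era, each record
  // arrives as a row key (ImmutableBytesWritable) plus that row's cells
  // (Result), so an identity pass-through map looks roughly like:
  //
  //   public static class ExampleMapper extends MapReduceBase
  //       implements TableMap<ImmutableBytesWritable, Result> {
  //     public void map(ImmutableBytesWritable row, Result value,
  //         OutputCollector<ImmutableBytesWritable, Result> output,
  //         Reporter reporter) throws IOException {
  //       output.collect(row, value);
  //     }
  //   }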

  public void validateInput(JobConf job) throws IOException {
    // expecting exactly one path
    Path[] tableNames = FileInputFormat.getInputPaths(job);
    if (tableNames == null || tableNames.length > 1) {
      throw new IOException("expecting one table name");
    }

    // connected to table?
    if (getHTable() == null) {
      throw new IOException("could not connect to table '" +
        tableNames[0].getName() + "'");
    }

    // expecting at least one column
    String colArg = job.get(COLUMN_LIST);
    if (colArg == null || colArg.length() == 0) {
      throw new IOException("expecting at least one column");
    }
  }
}