package org.apache.hadoop.hbase.mapred;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.util.StringUtils;
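/**
 * Convert HBase tabular data into a format that is consumable by Map/Reduce.
 *
 * <p>A minimal usage sketch; the table name "mytable", the column "contents:",
 * and the driver class MyDriver are illustrative placeholders. Note that the
 * table name is supplied as the job's input path:
 *
 * <pre>
 * JobConf job = new JobConf(HBaseConfiguration.create(), MyDriver.class);
 * FileInputFormat.setInputPaths(job, new Path("mytable"));
 * job.set(TableInputFormat.COLUMN_LIST, "contents:");
 * job.setInputFormat(TableInputFormat.class);
 * </pre>
 *
 * @deprecated Use the org.apache.hadoop.hbase.mapreduce API instead.
 */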
@Deprecated
public class TableInputFormat extends TableInputFormatBase implements
  JobConfigurable {
  private static final Log LOG = LogFactory.getLog(TableInputFormat.class);
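  /** Job configuration key holding a space-delimited list of columns to scan. */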
  public static final String COLUMN_LIST = "hbase.mapred.tablecolumns";
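  /**
   * Reads the table name from the job's single input path and the columns to
   * scan from the {@link #COLUMN_LIST} key, then opens the table for reading.
   *
   * @param job the job configuration
   */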
  public void configure(JobConf job) {
    // The single input path names the table to read from.
    Path[] tableNames = FileInputFormat.getInputPaths(job);
    String colArg = job.get(COLUMN_LIST);
    // Guard against a null column list to avoid a NullPointerException;
    // validateInput() rejects a missing column list with a clear message.
    String[] colNames = colArg == null ? new String[0] : colArg.split(" ");
    byte[][] m_cols = new byte[colNames.length][];
    for (int i = 0; i < m_cols.length; i++) {
      m_cols[i] = Bytes.toBytes(colNames[i]);
    }
    setInputColumns(m_cols);
    try {
      setHTable(new HTable(HBaseConfiguration.create(job),
        tableNames[0].getName()));
    } catch (Exception e) {
      LOG.error(StringUtils.stringifyException(e));
    }
  }
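  /**
   * Verifies that exactly one table name was supplied as the input path, that
   * the table could be opened, and that at least one column was requested.
   *
   * @param job the job configuration to validate
   * @throws IOException if any of these checks fail
   */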
  public void validateInput(JobConf job) throws IOException {
    // expecting exactly one path, naming the table
    Path[] tableNames = FileInputFormat.getInputPaths(job);
    if (tableNames == null || tableNames.length != 1) {
      throw new IOException("expecting one table name");
    }

    // connected to table?
    if (getHTable() == null) {
      throw new IOException("could not connect to table '" +
        tableNames[0].getName() + "'");
    }

    // expecting at least one column
    String colArg = job.get(COLUMN_LIST);
    if (colArg == null || colArg.length() == 0) {
      throw new IOException("expecting at least one column");
    }
  }
}