/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase;

import java.io.IOException;

import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;

/**
 * Compatibility shim layer implementation for Hadoop-1.
 */
public class HadoopShimsImpl implements HadoopShims {

  /**
   * Returns a TaskAttemptContext instance created from the given parameters.
   * @param job an instance of o.a.h.mapreduce.Job
   * @param taskId an identifier for the task attempt id. Should be parsable by
   * TaskAttemptID.forName()
   * @return a concrete o.a.h.mapreduce.TaskAttemptContext instance
   */
  @Override
  @SuppressWarnings("unchecked")
  public <T, J> T createTestTaskAttemptContext(J job, String taskId) {
    Job j = (Job)job;
    return (T)new TaskAttemptContext(j.getConfiguration(), TaskAttemptID.forName(taskId));
  }

  /**
   * Returns an array of DatanodeInfo for all live datanodes in the cluster.
   * @param dfs an instance of DistributedFileSystem
   * @return an array of DatanodeInfo describing the live datanodes
   * @throws IOException if the datanode report cannot be retrieved
   */
  @Override
  @SuppressWarnings("unchecked")
  public <I, DFS> I[] getLiveDatanodes(DFS dfs) throws IOException {
    DFSClient dfsClient = ((DistributedFileSystem)dfs).getClient();
    return (I[])dfsClient.datanodeReport(DatanodeReportType.LIVE);
  }

}
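
/*
 * Usage sketch, kept as a comment so the file stays valid Java: how a caller might
 * exercise this shim through the HadoopShims interface. The "job" handle, the "fs"
 * DistributedFileSystem handle, and the task attempt id string are hypothetical
 * placeholders; DatanodeInfo is o.a.h.hdfs.protocol.DatanodeInfo.
 *
 *   HadoopShims shims = new HadoopShimsImpl();
 *
 *   // Build a TaskAttemptContext for a synthetic task attempt id.
 *   TaskAttemptContext context = shims.createTestTaskAttemptContext(
 *       job, "attempt_200707121733_0003_m_000005_0");
 *
 *   // Ask HDFS (e.g. a MiniDFSCluster's file system) for its live datanodes.
 *   DatanodeInfo[] liveNodes = shims.getLiveDatanodes(fs);
 */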