/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase;

import java.io.IOException;

/**
 * A compatibility shim layer for interacting with different versions of Hadoop.
 *
 * <p>Generic type parameters are used instead of concrete Hadoop types so that
 * each shim implementation can bind to the classes of its own Hadoop version
 * without this interface depending on any particular one.
 */
// NOTE: we can move this under src/main if main code wants to use this shim layer
public interface HadoopShims {

  /**
   * Returns a TaskAttemptContext instance created from the given parameters.
   *
   * @param <T> the concrete TaskAttemptContext type returned
   *   (o.a.h.mapreduce.TaskAttemptContext in the bound Hadoop version)
   * @param <J> the job type (o.a.h.mapreduce.Job in the bound Hadoop version)
   * @param job an instance of o.a.h.mapreduce.Job
   * @param taskId an identifier for the task attempt id. Should be parsable by
   *   TaskAttemptId.forName()
   * @return a concrete TaskAttemptContext instance of o.a.h.mapreduce.TaskAttemptContext
   */
  <T, J> T createTestTaskAttemptContext(final J job, final String taskId);

  /**
   * Returns an array of DatanodeInfo for all live datanodes in the cluster.
   *
   * @param <I> the datanode descriptor type (o.a.h.hdfs.protocol.DatanodeInfo
   *   in the bound Hadoop version)
   * @param <DFS> the filesystem type (o.a.h.hdfs.DistributedFileSystem in the
   *   bound Hadoop version)
   * @param dfs an instance of DistributedFileSystem to query
   * @return an array of DatanodeInfo, one entry per live datanode in the cluster
   * @throws IOException if the datanode report cannot be retrieved from the cluster
   */
  <I, DFS> I[] getLiveDatanodes(DFS dfs) throws IOException;
}