Deprecated API


Contents
Deprecated Interfaces
org.apache.hadoop.hbase.mapred.TableMap
           
org.apache.hadoop.hbase.mapred.TableReduce
           
 

Deprecated Classes
org.apache.hadoop.hbase.mapred.Driver
           
org.apache.hadoop.hbase.mapred.GroupingTableMap
           
org.apache.hadoop.hbase.mapred.HRegionPartitioner
           
org.apache.hadoop.hbase.mapred.IdentityTableMap
           
org.apache.hadoop.hbase.mapred.IdentityTableReduce
           
org.apache.hadoop.hbase.client.MultiPut
          Use MultiAction instead. Data type class for putting multiple regions' worth of puts in one RPC. 
org.apache.hadoop.hbase.client.MultiPutResponse
          Replaced by MultiResponse. Response class for MultiPut. 
org.apache.hadoop.hbase.mapred.RowCounter
           
org.apache.hadoop.hbase.mapred.TableInputFormat
           
org.apache.hadoop.hbase.mapred.TableInputFormatBase
           
org.apache.hadoop.hbase.mapred.TableMapReduceUtil
           
org.apache.hadoop.hbase.mapred.TableOutputFormat
           
org.apache.hadoop.hbase.mapred.TableSplit
           
 

Deprecated Enums
org.apache.hadoop.hbase.HColumnDescriptor.CompressionType
          Compression now means which compression library rather than 'what' to compress. 
 

Deprecated Fields
org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.END
           
org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.START
           
 

Deprecated Methods
org.apache.hadoop.hbase.client.Put.add(byte[], long, byte[])
          use Put.add(byte[], byte[], long, byte[]) instead 
org.apache.hadoop.hbase.client.Scan.addColumn(byte[])
          use Scan.addColumn(byte[], byte[]) instead 
org.apache.hadoop.hbase.client.Get.addColumn(byte[])
          use Get.addColumn(byte[], byte[]) instead 
org.apache.hadoop.hbase.client.Scan.addColumns(byte[][])
          issue multiple Scan.addColumn(byte[], byte[]) instead 
org.apache.hadoop.hbase.client.Get.addColumns(byte[][])
          issue multiple Get.addColumn(byte[], byte[]) instead 
org.apache.hadoop.hbase.client.Scan.addColumns(String)
          use Scan.addColumn(byte[], byte[]) instead 
org.apache.hadoop.hbase.HServerLoad.addRegionInfo(byte[], int, int, int, int, int)
          Use HServerLoad.addRegionInfo(RegionLoad) 
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.atomicIncrement(byte[], byte[], byte[], long)
           
org.apache.hadoop.hbase.thrift.generated.TCell.clone()
           
org.apache.hadoop.hbase.thrift.generated.TRegionInfo.clone()
           
org.apache.hadoop.hbase.thrift.generated.BatchMutation.clone()
           
org.apache.hadoop.hbase.thrift.generated.IllegalArgument.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.enableTable_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.enableTable_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.disableTable_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.disableTable_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.isTableEnabled_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.isTableEnabled_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.compact_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.compact_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.majorCompact_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.majorCompact_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getTableNames_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getTableNames_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getColumnDescriptors_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getColumnDescriptors_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getTableRegions_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getTableRegions_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.createTable_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.createTable_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteTable_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteTable_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.get_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.get_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getVer_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getVer_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getVerTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getVerTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRow_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRow_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowWithColumns_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowWithColumns_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowWithColumnsTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowWithColumnsTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRows_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRows_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowsWithColumns_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowsWithColumns_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowsTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowsTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowsWithColumnsTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.getRowsWithColumnsTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRow_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRow_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRowTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRowTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRows_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRows_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRowsTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.mutateRowsTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.atomicIncrement_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.atomicIncrement_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAll_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAll_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAllTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAllTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAllRow_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAllRow_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAllRowTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.deleteAllRowTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpen_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpen_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenWithStop_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenWithStop_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenWithPrefix_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenWithPrefix_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenWithStopTs_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerOpenWithStopTs_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerGet_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerGet_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerGetList_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerGetList_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerClose_args.clone()
           
org.apache.hadoop.hbase.thrift.generated.Hbase.scannerClose_result.clone()
           
org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor.clone()
           
org.apache.hadoop.hbase.thrift.generated.IOError.clone()
           
org.apache.hadoop.hbase.thrift.generated.TRowResult.clone()
           
org.apache.hadoop.hbase.thrift.generated.AlreadyExists.clone()
           
org.apache.hadoop.hbase.thrift.generated.Mutation.clone()
           
org.apache.hadoop.hbase.KeyValue.createFirstOnRow(byte[], byte[], long)
            
org.apache.hadoop.hbase.RemoteExceptionHandler.decodeRemoteException(RemoteException)
          Use RemoteException.unwrapRemoteException() instead. In fact we should look into deprecating this whole class - St.Ack 2010-09-29 
org.apache.hadoop.hbase.client.Delete.deleteColumn(byte[])
          use Delete.deleteColumn(byte[], byte[]) instead 
org.apache.hadoop.hbase.client.Delete.deleteColumns(byte[], long)
          use Delete.deleteColumn(byte[], byte[], long) instead 
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.get(byte[], byte[], byte[])
           
org.apache.hadoop.hbase.client.Scan.getInputColumns()
            
org.apache.hadoop.hbase.regionserver.StoreFile.Reader.getScanner(boolean, boolean)
           
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.getVer(byte[], byte[], byte[], int)
           
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.getVerTs(byte[], byte[], byte[], long, int)
           
org.apache.hadoop.hbase.client.HBaseAdmin.modifyColumn(byte[], byte[], HColumnDescriptor)
          The columnName is redundant. Use HBaseAdmin.modifyColumn(byte[], HColumnDescriptor) 
org.apache.hadoop.hbase.client.HBaseAdmin.modifyColumn(String, String, HColumnDescriptor)
          The columnName is redundant. Use HBaseAdmin.modifyColumn(String, HColumnDescriptor) 
org.apache.hadoop.hbase.regionserver.HRegionServer.multiPut(MultiPut)
          Use HRegionServer.multi( MultiAction action) instead 
org.apache.hadoop.hbase.client.HConnection.processBatchOfPuts(List, byte[], ExecutorService)
          Use HConnectionManager::processBatch instead. 
org.apache.hadoop.hbase.client.Result.sorted()
            
 

Deprecated Constructors
org.apache.hadoop.hbase.HBaseConfiguration()
           
org.apache.hadoop.hbase.HBaseConfiguration(Configuration)
           
 



Copyright © 2011 The Apache Software Foundation. All Rights Reserved.