Deprecated Methods |
org.apache.hadoop.hbase.HServerLoad.addRegionInfo(byte[], int, int, int, int, int)
Use HServerLoad.addRegionInfo(RegionLoad) |
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.atomicIncrement(byte[], byte[], byte[], long)
|
org.apache.hadoop.hbase.client.HTable.checkAndSave(BatchUpdate, HbaseMapWritable, RowLock)
As of hbase 0.20.0, replaced by HTable.checkAndPut(byte[], byte[], byte[], byte[], org.apache.hadoop.hbase.client.Put) |
org.apache.hadoop.hbase.client.HTable.commit(BatchUpdate)
As of hbase 0.20.0, replaced by HTable.delete(Delete) or
HTable.put(Put) |
org.apache.hadoop.hbase.client.HTable.commit(BatchUpdate, RowLock)
As of hbase 0.20.0, replaced by HTable.delete(Delete) or
HTable.put(Put) |
org.apache.hadoop.hbase.client.HTable.commit(List)
As of hbase 0.20.0, replaced by HTable.delete(Delete) or
HTable.put(List) |
org.apache.hadoop.hbase.HStoreKey.compareTo(HStoreKey)
Use Comparators instead. This can give wrong results. |
org.apache.hadoop.hbase.KeyValue.createFirstOnRow(byte[], byte[], long)
|
org.apache.hadoop.hbase.rest.RowModel.delete(byte[], byte[])
|
org.apache.hadoop.hbase.rest.RowModel.delete(byte[], byte[], byte[][])
|
org.apache.hadoop.hbase.rest.TimestampModel.delete(byte[], byte[], byte[][], long)
|
org.apache.hadoop.hbase.rest.TimestampModel.delete(byte[], byte[], long)
|
org.apache.hadoop.hbase.client.HTable.deleteAll(byte[])
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(byte[], byte[])
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(byte[], byte[], long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(byte[], byte[], long, RowLock)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(byte[], long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(String)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(String, String)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAll(String, String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAllByRegex(byte[], String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAllByRegex(byte[], String, long, RowLock)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAllByRegex(String, String)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteAllByRegex(String, String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamily(byte[], byte[])
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamily(byte[], byte[], long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamily(byte[], byte[], long, RowLock)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamily(String, String)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamily(String, String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamilyByRegex(byte[], String)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamilyByRegex(byte[], String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamilyByRegex(byte[], String, long, RowLock)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamilyByRegex(String, String)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.deleteFamilyByRegex(String, String, long)
As of hbase 0.20.0, replaced by HTable.delete(Delete) |
org.apache.hadoop.hbase.client.HTable.exists(byte[])
As of hbase 0.20.0, replaced by HTable.exists(Get) |
org.apache.hadoop.hbase.client.HTable.exists(byte[], byte[])
As of hbase 0.20.0, replaced by HTable.exists(Get) |
org.apache.hadoop.hbase.client.HTable.exists(byte[], byte[], long)
As of hbase 0.20.0, replaced by HTable.exists(Get) |
org.apache.hadoop.hbase.client.HTable.exists(byte[], byte[], long, RowLock)
As of hbase 0.20.0, replaced by HTable.exists(Get) |
org.apache.hadoop.hbase.filter.RowFilterInterface.filterColumn(byte[], byte[], byte[])
Use RowFilterInterface.filterColumn(byte[], int, int, byte[], int, int, byte[], int, int)
instead. |
org.apache.hadoop.hbase.filter.RowFilterInterface.filterRowKey(byte[])
Use RowFilterInterface.filterRowKey(byte[], int, int) instead. |
org.apache.hadoop.hbase.client.HTable.get(byte[], byte[])
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.rest.RowModel.get(byte[], byte[])
|
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.get(byte[], byte[], byte[])
|
org.apache.hadoop.hbase.rest.RowModel.get(byte[], byte[], byte[][])
|
org.apache.hadoop.hbase.rest.TimestampModel.get(byte[], byte[], byte[][], long)
|
org.apache.hadoop.hbase.rest.RowModel.get(byte[], byte[], byte[][], long)
|
org.apache.hadoop.hbase.rest.TimestampModel.get(byte[], byte[], byte[], long)
|
org.apache.hadoop.hbase.rest.TimestampModel.get(byte[], byte[], byte[], long, int)
|
org.apache.hadoop.hbase.client.HTable.get(byte[], byte[], int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.rest.RowModel.get(byte[], byte[], long)
|
org.apache.hadoop.hbase.client.HTable.get(byte[], byte[], long, int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.get(String, String)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.get(String, String, int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.get(String, String, long, int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getClosestRowBefore(byte[], byte[])
As of hbase 0.20.0, replaced by HTable.getRowOrBefore(byte[], byte[]) |
org.apache.hadoop.hbase.migration.nineteen.regionserver.HStoreFile.getFilterDir(Path, int, byte[])
|
org.apache.hadoop.hbase.client.Scan.getOldFilter()
|
org.apache.hadoop.hbase.client.HTable.getRow(byte[])
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], byte[][])
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], byte[][], int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], byte[][], long)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], byte[][], long, int, RowLock)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], long)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(byte[], long, int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, long)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, long, int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, String[])
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, String[], int)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, String[], long)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getRow(String, String[], long, int, RowLock)
As of hbase 0.20.0, replaced by HTable.get(Get) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][])
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][], byte[])
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][], byte[], byte[])
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][], byte[], byte[], long)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][], byte[], long)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][], byte[], long, RowFilterInterface)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(byte[][], byte[], RowFilterInterface)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(String[])
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(String[], String)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(String[], String, long, RowFilterInterface)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.client.HTable.getScanner(String[], String, String, long)
As of hbase 0.20.0, replaced by HTable.getScanner(Scan) |
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.getVer(byte[], byte[], byte[], int)
|
org.apache.hadoop.hbase.thrift.ThriftServer.HBaseHandler.getVerTs(byte[], byte[], byte[], long, int)
|
org.apache.hadoop.hbase.client.HTablePool.newHTable(String)
Use createHTable |
org.apache.hadoop.hbase.filter.RowFilterInterface.rowProcessed(boolean, byte[])
Use RowFilterInterface.rowProcessed(boolean, byte[], int, int) instead. |
org.apache.hadoop.hbase.filter.RegExpRowFilter.setColumnFilter(byte[], byte[])
Column filtering has been replaced by ColumnValueFilter.
Specify a value that must be matched for the given column. |
org.apache.hadoop.hbase.filter.RegExpRowFilter.setColumnFilters(Map)
Column filtering has been replaced by ColumnValueFilter.
Set column filters for a number of columns. |
org.apache.hadoop.hbase.client.Scan.setOldFilter(RowFilterInterface)
|