1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.thrift;
20
21 import static org.apache.hadoop.hbase.util.Bytes.getBytes;
22
23 import java.io.IOException;
24 import java.net.InetAddress;
25 import java.net.InetSocketAddress;
26 import java.net.UnknownHostException;
27 import java.nio.ByteBuffer;
28 import java.security.PrivilegedAction;
29 import java.util.ArrayList;
30 import java.util.Arrays;
31 import java.util.Collections;
32 import java.util.HashMap;
33 import java.util.List;
34 import java.util.Map;
35 import java.util.TreeMap;
36 import java.util.concurrent.BlockingQueue;
37 import java.util.concurrent.ExecutorService;
38 import java.util.concurrent.LinkedBlockingQueue;
39 import java.util.concurrent.ThreadPoolExecutor;
40 import java.util.concurrent.TimeUnit;
41
42 import javax.security.auth.callback.Callback;
43 import javax.security.auth.callback.UnsupportedCallbackException;
44 import javax.security.sasl.AuthorizeCallback;
45 import javax.security.sasl.Sasl;
46 import javax.security.sasl.SaslServer;
47
48 import org.apache.commons.cli.CommandLine;
49 import org.apache.commons.cli.Option;
50 import org.apache.commons.cli.OptionGroup;
51 import org.apache.commons.logging.Log;
52 import org.apache.commons.logging.LogFactory;
53 import org.apache.hadoop.hbase.classification.InterfaceAudience;
54 import org.apache.hadoop.conf.Configuration;
55 import org.apache.hadoop.hbase.HBaseConfiguration;
56 import org.apache.hadoop.hbase.HColumnDescriptor;
57 import org.apache.hadoop.hbase.HConstants;
58 import org.apache.hadoop.hbase.HRegionInfo;
59 import org.apache.hadoop.hbase.HTableDescriptor;
60 import org.apache.hadoop.hbase.KeyValue;
61 import org.apache.hadoop.hbase.ServerName;
62 import org.apache.hadoop.hbase.TableName;
63 import org.apache.hadoop.hbase.TableNotFoundException;
64 import org.apache.hadoop.hbase.client.Delete;
65 import org.apache.hadoop.hbase.client.Durability;
66 import org.apache.hadoop.hbase.client.Get;
67 import org.apache.hadoop.hbase.client.HBaseAdmin;
68 import org.apache.hadoop.hbase.client.HTable;
69 import org.apache.hadoop.hbase.client.HTableInterface;
70 import org.apache.hadoop.hbase.client.Increment;
71 import org.apache.hadoop.hbase.client.OperationWithAttributes;
72 import org.apache.hadoop.hbase.client.Put;
73 import org.apache.hadoop.hbase.client.Result;
74 import org.apache.hadoop.hbase.client.ResultScanner;
75 import org.apache.hadoop.hbase.client.Scan;
76 import org.apache.hadoop.hbase.filter.Filter;
77 import org.apache.hadoop.hbase.filter.ParseFilter;
78 import org.apache.hadoop.hbase.filter.PrefixFilter;
79 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
80 import org.apache.hadoop.hbase.security.SecurityUtil;
81 import org.apache.hadoop.hbase.security.UserProvider;
82 import org.apache.hadoop.hbase.thrift.CallQueue.Call;
83 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
84 import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
85 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
86 import org.apache.hadoop.hbase.thrift.generated.Hbase;
87 import org.apache.hadoop.hbase.thrift.generated.IOError;
88 import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
89 import org.apache.hadoop.hbase.thrift.generated.Mutation;
90 import org.apache.hadoop.hbase.thrift.generated.TCell;
91 import org.apache.hadoop.hbase.thrift.generated.TIncrement;
92 import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
93 import org.apache.hadoop.hbase.thrift.generated.TRowResult;
94 import org.apache.hadoop.hbase.thrift.generated.TScan;
95 import org.apache.hadoop.hbase.util.Bytes;
96 import org.apache.hadoop.hbase.util.ConnectionCache;
97 import org.apache.hadoop.hbase.util.DNS;
98 import org.apache.hadoop.hbase.util.JvmPauseMonitor;
99 import org.apache.hadoop.hbase.util.Strings;
100 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
101 import org.apache.hadoop.security.UserGroupInformation;
102 import org.apache.thrift.TException;
103 import org.apache.thrift.TProcessor;
104 import org.apache.thrift.protocol.TBinaryProtocol;
105 import org.apache.thrift.protocol.TCompactProtocol;
106 import org.apache.thrift.protocol.TProtocol;
107 import org.apache.thrift.protocol.TProtocolFactory;
108 import org.apache.thrift.server.THsHaServer;
109 import org.apache.thrift.server.TNonblockingServer;
110 import org.apache.thrift.server.TServer;
111 import org.apache.thrift.server.TThreadedSelectorServer;
112 import org.apache.thrift.transport.TFramedTransport;
113 import org.apache.thrift.transport.TNonblockingServerSocket;
114 import org.apache.thrift.transport.TNonblockingServerTransport;
115 import org.apache.thrift.transport.TSaslServerTransport;
116 import org.apache.thrift.transport.TServerSocket;
117 import org.apache.thrift.transport.TServerTransport;
118 import org.apache.thrift.transport.TTransportFactory;
119
120 import com.google.common.base.Joiner;
121 import com.google.common.base.Throwables;
122 import com.google.common.util.concurrent.ThreadFactoryBuilder;
123
124
125
126
127
128 @InterfaceAudience.Private
129 @SuppressWarnings("deprecation")
130 public class ThriftServerRunner implements Runnable {
131
private static final Log LOG = LogFactory.getLog(ThriftServerRunner.class);

// Selects which Thrift server implementation to run; values are the
// ImplType options below ("hsha", "nonblocking", "threadpool", ...).
static final String SERVER_TYPE_CONF_KEY =
    "hbase.regionserver.thrift.server.type";

// Address/port and wire-format options for the Thrift endpoint.
static final String BIND_CONF_KEY = "hbase.regionserver.thrift.ipaddress";
static final String COMPACT_CONF_KEY = "hbase.regionserver.thrift.compact";
static final String FRAMED_CONF_KEY = "hbase.regionserver.thrift.framed";
static final String MAX_FRAME_SIZE_CONF_KEY = "hbase.regionserver.thrift.framed.max_frame_size_in_mb";
static final String PORT_CONF_KEY = "hbase.regionserver.thrift.port";
// Whether increments should be coalesced via IncrementCoalescer.
static final String COALESCE_INC_KEY = "hbase.regionserver.thrift.coalesceIncrement";

// Socket read timeout used by the thread-pool server (ms).
public static final String THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY =
    "hbase.thrift.server.socket.read.timeout";
public static final int THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT = 60000;

// SASL quality-of-protection: "auth", "auth-int" or "auth-conf"
// (validated in the constructor).
static final String THRIFT_QOP_KEY = "hbase.thrift.security.qop";
static final String BACKLOG_CONF_KEY = "hbase.regionserver.thrift.backlog";

private static final String DEFAULT_BIND_ADDR = "0.0.0.0";
public static final int DEFAULT_LISTEN_PORT = 9090;
private final int listenPort;

private Configuration conf;
// The running Thrift server; volatile so shutdown() sees the instance
// published by setupServer() on the serving thread.
volatile TServer tserver;
private final Hbase.Iface handler;
private final ThriftMetrics metrics;
private final HBaseHandler hbaseHandler;
// Login (server) user; all serving is performed via realUser.doAs().
private final UserGroupInformation realUser;

// Configured SASL QOP, or null when security is not requested.
private final String qop;
private String host;

// Logs warnings when the JVM pauses (GC, etc.).
private final JvmPauseMonitor pauseMonitor;
180
181
182 enum ImplType {
183 HS_HA("hsha", true, THsHaServer.class, true),
184 NONBLOCKING("nonblocking", true, TNonblockingServer.class, true),
185 THREAD_POOL("threadpool", false, TBoundedThreadPoolServer.class, true),
186 THREADED_SELECTOR(
187 "threadedselector", true, TThreadedSelectorServer.class, true);
188
189 public static final ImplType DEFAULT = THREAD_POOL;
190
191 final String option;
192 final boolean isAlwaysFramed;
193 final Class<? extends TServer> serverClass;
194 final boolean canSpecifyBindIP;
195
196 ImplType(String option, boolean isAlwaysFramed,
197 Class<? extends TServer> serverClass, boolean canSpecifyBindIP) {
198 this.option = option;
199 this.isAlwaysFramed = isAlwaysFramed;
200 this.serverClass = serverClass;
201 this.canSpecifyBindIP = canSpecifyBindIP;
202 }
203
204
205
206
207
208 @Override
209 public String toString() {
210 return "-" + option;
211 }
212
213 String getDescription() {
214 StringBuilder sb = new StringBuilder("Use the " +
215 serverClass.getSimpleName());
216 if (isAlwaysFramed) {
217 sb.append(" This implies the framed transport.");
218 }
219 if (this == DEFAULT) {
220 sb.append("This is the default.");
221 }
222 return sb.toString();
223 }
224
225 static OptionGroup createOptionGroup() {
226 OptionGroup group = new OptionGroup();
227 for (ImplType t : values()) {
228 group.addOption(new Option(t.option, t.getDescription()));
229 }
230 return group;
231 }
232
233 static ImplType getServerImpl(Configuration conf) {
234 String confType = conf.get(SERVER_TYPE_CONF_KEY, THREAD_POOL.option);
235 for (ImplType t : values()) {
236 if (confType.equals(t.option)) {
237 return t;
238 }
239 }
240 throw new AssertionError("Unknown server ImplType.option:" + confType);
241 }
242
243 static void setServerImpl(CommandLine cmd, Configuration conf) {
244 ImplType chosenType = null;
245 int numChosen = 0;
246 for (ImplType t : values()) {
247 if (cmd.hasOption(t.option)) {
248 chosenType = t;
249 ++numChosen;
250 }
251 }
252 if (numChosen < 1) {
253 LOG.info("Using default thrift server type");
254 chosenType = DEFAULT;
255 } else if (numChosen > 1) {
256 throw new AssertionError("Exactly one option out of " +
257 Arrays.toString(values()) + " has to be specified");
258 }
259 LOG.info("Using thrift server type " + chosenType.option);
260 conf.set(SERVER_TYPE_CONF_KEY, chosenType.option);
261 }
262
263 public String simpleClassName() {
264 return serverClass.getSimpleName();
265 }
266
267 public static List<String> serversThatCannotSpecifyBindIP() {
268 List<String> l = new ArrayList<String>();
269 for (ImplType t : values()) {
270 if (!t.canSpecifyBindIP) {
271 l.add(t.simpleClassName());
272 }
273 }
274 return l;
275 }
276
277 }
278
/**
 * Creates the runner: performs the Kerberos login when both Hadoop and HBase
 * security are enabled, builds the handler (wrapped in a metrics proxy), and
 * validates the optional SASL QOP setting.
 *
 * @throws IOException on login failure or an invalid/unusable QOP value
 */
public ThriftServerRunner(Configuration conf) throws IOException {
  UserProvider userProvider = UserProvider.instantiate(conf);
  // login the server principal (if using secure Hadoop)
  boolean securityEnabled = userProvider.isHadoopSecurityEnabled()
      && userProvider.isHBaseSecurityEnabled();
  if (securityEnabled) {
    // Resolve our own host name first; it becomes the instance part of the
    // Kerberos principal.
    host = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
        conf.get("hbase.thrift.dns.interface", "default"),
        conf.get("hbase.thrift.dns.nameserver", "default")));
    userProvider.login("hbase.thrift.keytab.file",
        "hbase.thrift.kerberos.principal", host);
  }
  this.conf = HBaseConfiguration.create(conf);
  this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
  this.metrics = new ThriftMetrics(conf, ThriftMetrics.ThriftServerType.ONE);
  this.pauseMonitor = new JvmPauseMonitor(conf, this.metrics.getSource());
  this.hbaseHandler = new HBaseHandler(conf, userProvider);
  this.hbaseHandler.initMetrics(metrics);
  // Callers go through the proxy so per-call latencies are recorded.
  this.handler = HbaseHandlerMetricsProxy.newInstance(
      hbaseHandler, metrics, conf);
  this.realUser = userProvider.getCurrent().getUGI();
  qop = conf.get(THRIFT_QOP_KEY);
  if (qop != null) {
    // QOP only makes sense with SASL, which requires a secure cluster.
    if (!qop.equals("auth") && !qop.equals("auth-int")
        && !qop.equals("auth-conf")) {
      throw new IOException("Invalid " + THRIFT_QOP_KEY + ": " + qop
          + ", it must be 'auth', 'auth-int', or 'auth-conf'");
    }
    if (!securityEnabled) {
      throw new IOException("Thrift server must"
          + " run in secure mode to support authentication");
    }
  }
}
313
314
315
316
/**
 * Sets up and serves the Thrift server as the login user. Blocks until the
 * server stops; exits the JVM if the server cannot be started.
 */
@Override
public void run() {
  realUser.doAs(
      new PrivilegedAction<Object>() {
        @Override
        public Object run() {
          pauseMonitor.start();
          try {
            setupServer();
            tserver.serve();
          } catch (Exception e) {
            LOG.fatal("Cannot run ThriftServer", e);
            // The gateway is useless without a serving Thrift server, so
            // crash the whole process rather than linger.
            System.exit(-1);
          }
          return null;
        }
      });
}
336
337 public void shutdown() {
338 if (pauseMonitor != null) {
339 pauseMonitor.stop();
340 }
341 if (tserver != null) {
342 tserver.stop();
343 tserver = null;
344 }
345 }
346
347
348
349
/**
 * Constructs and assigns {@link #tserver}: picks the protocol (binary or
 * compact), the transport (plain, framed, or SASL/GSSAPI), and the server
 * implementation configured via {@code SERVER_TYPE_CONF_KEY}.
 *
 * @throws Exception if any server component cannot be created
 */
private void setupServer() throws Exception {
  // Construct the correct ProtocolFactory.
  TProtocolFactory protocolFactory;
  if (conf.getBoolean(COMPACT_CONF_KEY, false)) {
    LOG.debug("Using compact protocol");
    protocolFactory = new TCompactProtocol.Factory();
  } else {
    LOG.debug("Using binary protocol");
    protocolFactory = new TBinaryProtocol.Factory();
  }

  final TProcessor p = new Hbase.Processor<Hbase.Iface>(handler);
  ImplType implType = ImplType.getServerImpl(conf);
  TProcessor processor = p;

  // Construct the correct TransportFactory.
  TTransportFactory transportFactory;
  if (conf.getBoolean(FRAMED_CONF_KEY, false) || implType.isAlwaysFramed) {
    // SASL and framed transport are mutually exclusive here.
    if (qop != null) {
      throw new RuntimeException("Thrift server authentication"
          + " doesn't work with framed transport yet");
    }
    // Frame size limit defaults to 2 MB (config value is in MB).
    transportFactory = new TFramedTransport.Factory(
        conf.getInt(MAX_FRAME_SIZE_CONF_KEY, 2) * 1024 * 1024);
    LOG.debug("Using framed transport");
  } else if (qop == null) {
    transportFactory = new TTransportFactory();
  } else {
    // SASL/GSSAPI: extract the short name from our Kerberos principal.
    String name = SecurityUtil.getUserFromPrincipal(
        conf.get("hbase.thrift.kerberos.principal"));
    Map<String, String> saslProperties = new HashMap<String, String>();
    saslProperties.put(Sasl.QOP, qop);
    TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
    saslFactory.addServerDefinition("GSSAPI", name, host, saslProperties,
        new SaslGssCallbackHandler() {
          @Override
          public void handle(Callback[] callbacks)
              throws UnsupportedCallbackException {
            AuthorizeCallback ac = null;
            for (Callback callback : callbacks) {
              if (callback instanceof AuthorizeCallback) {
                ac = (AuthorizeCallback) callback;
              } else {
                throw new UnsupportedCallbackException(callback,
                    "Unrecognized SASL GSSAPI Callback");
              }
            }
            if (ac != null) {
              // Authorize only when the authenticated identity matches the
              // requested authorization identity.
              String authid = ac.getAuthenticationID();
              String authzid = ac.getAuthorizationID();
              if (!authid.equals(authzid)) {
                ac.setAuthorized(false);
              } else {
                ac.setAuthorized(true);
                String userName = SecurityUtil.getUserFromPrincipal(authzid);
                LOG.info("Effective user: " + userName);
                ac.setAuthorizedID(userName);
              }
            }
          }
        });
    transportFactory = saslFactory;

    // Wrap the processor so each call runs as the SASL-authenticated user.
    processor = new TProcessor() {
      @Override
      public boolean process(TProtocol inProt,
          TProtocol outProt) throws TException {
        TSaslServerTransport saslServerTransport =
            (TSaslServerTransport)inProt.getTransport();
        SaslServer saslServer = saslServerTransport.getSaslServer();
        String principal = saslServer.getAuthorizationID();
        hbaseHandler.setEffectiveUser(principal);
        return p.process(inProt, outProt);
      }
    };
  }

  // Fail fast if a bind address was configured for a server type that
  // cannot honor it.
  if (conf.get(BIND_CONF_KEY) != null && !implType.canSpecifyBindIP) {
    LOG.error("Server types " + Joiner.on(", ").join(
        ImplType.serversThatCannotSpecifyBindIP()) + " don't support IP " +
        "address binding at the moment. See " +
        "https://issues.apache.org/jira/browse/HBASE-2155 for details.");
    throw new RuntimeException(
        "-" + BIND_CONF_KEY + " not supported with " + implType);
  }

  if (implType == ImplType.HS_HA || implType == ImplType.NONBLOCKING ||
      implType == ImplType.THREADED_SELECTOR) {
    // The three non-blocking implementations share a non-blocking socket.
    InetAddress listenAddress = getBindAddress(conf);
    TNonblockingServerTransport serverTransport = new TNonblockingServerSocket(
        new InetSocketAddress(listenAddress, listenPort));

    if (implType == ImplType.NONBLOCKING) {
      TNonblockingServer.Args serverArgs =
          new TNonblockingServer.Args(serverTransport);
      serverArgs.processor(processor)
          .transportFactory(transportFactory)
          .protocolFactory(protocolFactory);
      tserver = new TNonblockingServer(serverArgs);
    } else if (implType == ImplType.HS_HA) {
      THsHaServer.Args serverArgs = new THsHaServer.Args(serverTransport);
      // Worker pool fed through a metrics-instrumented call queue.
      CallQueue callQueue =
          new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
      ExecutorService executorService = createExecutor(
          callQueue, serverArgs.getWorkerThreads());
      serverArgs.executorService(executorService)
          .processor(processor)
          .transportFactory(transportFactory)
          .protocolFactory(protocolFactory);
      tserver = new THsHaServer(serverArgs);
    } else {
      TThreadedSelectorServer.Args serverArgs =
          new HThreadedSelectorServerArgs(serverTransport, conf);
      CallQueue callQueue =
          new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
      ExecutorService executorService = createExecutor(
          callQueue, serverArgs.getWorkerThreads());
      serverArgs.executorService(executorService)
          .processor(processor)
          .transportFactory(transportFactory)
          .protocolFactory(protocolFactory);
      tserver = new TThreadedSelectorServer(serverArgs);
    }
    LOG.info("starting HBase " + implType.simpleClassName() +
        " server on " + Integer.toString(listenPort));
  } else if (implType == ImplType.THREAD_POOL) {
    // Thread pool server: one dedicated thread per client connection.
    InetAddress listenAddress = getBindAddress(conf);
    int readTimeout = conf.getInt(THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY,
        THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT);
    TServerTransport serverTransport =
        new TServerSocket(new InetSocketAddress(listenAddress, listenPort), readTimeout);

    TBoundedThreadPoolServer.Args serverArgs =
        new TBoundedThreadPoolServer.Args(serverTransport, conf);
    serverArgs.processor(processor)
        .transportFactory(transportFactory)
        .protocolFactory(protocolFactory);
    LOG.info("starting " + ImplType.THREAD_POOL.simpleClassName() + " on "
        + listenAddress + ":" + Integer.toString(listenPort)
        + " with readTimeout " + readTimeout + "ms; " + serverArgs);
    TBoundedThreadPoolServer tserver =
        new TBoundedThreadPoolServer(serverArgs, metrics);
    this.tserver = tserver;
  } else {
    throw new AssertionError("Unsupported Thrift server implementation: " +
        implType.simpleClassName());
  }

  // Sanity check: we created the server type the configuration asked for.
  if (tserver.getClass() != implType.serverClass) {
    throw new AssertionError("Expected to create Thrift server class " +
        implType.serverClass.getName() + " but got " +
        tserver.getClass().getName());
  }

  registerFilters(conf);
}
512
513 ExecutorService createExecutor(BlockingQueue<Runnable> callQueue,
514 int workerThreads) {
515 ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();
516 tfb.setDaemon(true);
517 tfb.setNameFormat("thrift-worker-%d");
518 return new ThreadPoolExecutor(workerThreads, workerThreads,
519 Long.MAX_VALUE, TimeUnit.SECONDS, callQueue, tfb.build());
520 }
521
522 private InetAddress getBindAddress(Configuration conf)
523 throws UnknownHostException {
524 String bindAddressStr = conf.get(BIND_CONF_KEY, DEFAULT_BIND_ADDR);
525 return InetAddress.getByName(bindAddressStr);
526 }
527
528 protected static class ResultScannerWrapper {
529
530 private final ResultScanner scanner;
531 private final boolean sortColumns;
532 public ResultScannerWrapper(ResultScanner resultScanner,
533 boolean sortResultColumns) {
534 scanner = resultScanner;
535 sortColumns = sortResultColumns;
536 }
537
538 public ResultScanner getScanner() {
539 return scanner;
540 }
541
542 public boolean isColumnSorted() {
543 return sortColumns;
544 }
545 }
546
547
548
549
550
551 public static class HBaseHandler implements Hbase.Iface {
protected Configuration conf;
protected final Log LOG = LogFactory.getLog(this.getClass().getName());

// nextScannerId and scannerMap manage scanner handles issued to clients;
// both are only touched from synchronized methods of this class.
protected int nextScannerId = 0;
protected HashMap<Integer, ResultScannerWrapper> scannerMap = null;
private ThriftMetrics metrics = null;

// Caches per-effective-user connections/tables/admins.
private final ConnectionCache connectionCache;
IncrementCoalescer coalescer = null;

// Connection-cache tuning: sweep interval and max idle time (ms).
static final String CLEANUP_INTERVAL = "hbase.thrift.connection.cleanup-interval";
static final String MAX_IDLETIME = "hbase.thrift.connection.max-idletime";
565
566
567
568
569
570
571
572 byte[][] getAllColumns(HTable table) throws IOException {
573 HColumnDescriptor[] cds = table.getTableDescriptor().getColumnFamilies();
574 byte[][] columns = new byte[cds.length][];
575 for (int i = 0; i < cds.length; i++) {
576 columns[i] = Bytes.add(cds[i].getName(),
577 KeyValue.COLUMN_FAMILY_DELIM_ARRAY);
578 }
579 return columns;
580 }
581
582
583
584
585
586
587
588
589
590 public HTableInterface getTable(final byte[] tableName) throws
591 IOException {
592 String table = Bytes.toString(tableName);
593 return connectionCache.getTable(table);
594 }
595
596 public HTableInterface getTable(final ByteBuffer tableName) throws IOException {
597 return getTable(getBytes(tableName));
598 }
599
600
601
602
603
604
605
606
607 protected synchronized int addScanner(ResultScanner scanner,boolean sortColumns) {
608 int id = nextScannerId++;
609 ResultScannerWrapper resultScannerWrapper = new ResultScannerWrapper(scanner, sortColumns);
610 scannerMap.put(id, resultScannerWrapper);
611 return id;
612 }
613
614
615
616
617
618
619
/**
 * Looks up the scanner registered under the given id.
 *
 * @return the wrapper, or null if the id is unknown or already removed
 */
protected synchronized ResultScannerWrapper getScanner(int id) {
  return scannerMap.get(id);
}
623
624
625
626
627
628
629
630
/**
 * Removes and returns the scanner registered under the given id; the caller
 * is responsible for closing the underlying ResultScanner.
 *
 * @return the removed wrapper, or null if the id was not registered
 */
protected synchronized ResultScannerWrapper removeScanner(int id) {
  return scannerMap.remove(id);
}
634
/**
 * Constructs the handler: initializes scanner bookkeeping, the increment
 * coalescer, and the per-user connection cache.
 *
 * @param c the server configuration
 * @param userProvider supplies the login user for the connection cache
 * @throws IOException if the connection cache cannot be created
 */
protected HBaseHandler(final Configuration c,
    final UserProvider userProvider) throws IOException {
  this.conf = c;
  scannerMap = new HashMap<Integer, ResultScannerWrapper>();
  this.coalescer = new IncrementCoalescer(this);

  // Idle connections are swept every cleanInterval ms and dropped after
  // maxIdleTime ms without use (defaults: 10 s / 10 min).
  int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
  int maxIdleTime = conf.getInt(MAX_IDLETIME, 10 * 60 * 1000);
  connectionCache = new ConnectionCache(
      conf, userProvider, cleanInterval, maxIdleTime);
}
646
647
648
649
/**
 * Obtains an HBaseAdmin from the connection cache (scoped to the current
 * effective user). Caller should not close the returned instance.
 */
private HBaseAdmin getHBaseAdmin() throws IOException {
  return connectionCache.getAdmin();
}
653
// Records the authenticated principal so subsequent table/admin lookups in
// the connection cache run as that user (set per-call by the SASL wrapper).
void setEffectiveUser(String effectiveUser) {
  connectionCache.setEffectiveUser(effectiveUser);
}
657
658 @Override
659 public void enableTable(ByteBuffer tableName) throws IOError {
660 try{
661 getHBaseAdmin().enableTable(getBytes(tableName));
662 } catch (IOException e) {
663 LOG.warn(e.getMessage(), e);
664 throw new IOError(e.getMessage());
665 }
666 }
667
668 @Override
669 public void disableTable(ByteBuffer tableName) throws IOError{
670 try{
671 getHBaseAdmin().disableTable(getBytes(tableName));
672 } catch (IOException e) {
673 LOG.warn(e.getMessage(), e);
674 throw new IOError(e.getMessage());
675 }
676 }
677
678 @Override
679 public boolean isTableEnabled(ByteBuffer tableName) throws IOError {
680 try {
681 return HTable.isTableEnabled(this.conf, getBytes(tableName));
682 } catch (IOException e) {
683 LOG.warn(e.getMessage(), e);
684 throw new IOError(e.getMessage());
685 }
686 }
687
688 @Override
689 public void compact(ByteBuffer tableNameOrRegionName) throws IOError {
690 try{
691 getHBaseAdmin().compact(getBytes(tableNameOrRegionName));
692 } catch (InterruptedException e) {
693 throw new IOError(e.getMessage());
694 } catch (IOException e) {
695 LOG.warn(e.getMessage(), e);
696 throw new IOError(e.getMessage());
697 }
698 }
699
700 @Override
701 public void majorCompact(ByteBuffer tableNameOrRegionName) throws IOError {
702 try{
703 getHBaseAdmin().majorCompact(getBytes(tableNameOrRegionName));
704 } catch (InterruptedException e) {
705 LOG.warn(e.getMessage(), e);
706 throw new IOError(e.getMessage());
707 } catch (IOException e) {
708 LOG.warn(e.getMessage(), e);
709 throw new IOError(e.getMessage());
710 }
711 }
712
713 @Override
714 public List<ByteBuffer> getTableNames() throws IOError {
715 try {
716 TableName[] tableNames = this.getHBaseAdmin().listTableNames();
717 ArrayList<ByteBuffer> list = new ArrayList<ByteBuffer>(tableNames.length);
718 for (int i = 0; i < tableNames.length; i++) {
719 list.add(ByteBuffer.wrap(tableNames[i].getName()));
720 }
721 return list;
722 } catch (IOException e) {
723 LOG.warn(e.getMessage(), e);
724 throw new IOError(e.getMessage());
725 }
726 }
727
728
729
730
/**
 * @return the regions of the given table (server name/port, start/end keys,
 * region id/name/version), or an empty list if the table does not exist
 */
@Override
public List<TRegionInfo> getTableRegions(ByteBuffer tableName)
    throws IOError {
  try {
    HTableInterface table;
    try {
      table = getTable(tableName);
    } catch (TableNotFoundException ex) {
      return new ArrayList<TRegionInfo>();
    }
    // NOTE(review): relies on the concrete HTable's getRegionLocations();
    // getTableRegions would break if getTable returned a non-HTable impl.
    Map<HRegionInfo, ServerName> regionLocations = ((HTable)table).getRegionLocations();
    List<TRegionInfo> results = new ArrayList<TRegionInfo>();
    for (Map.Entry<HRegionInfo, ServerName> entry :
        regionLocations.entrySet()) {
      HRegionInfo info = entry.getKey();
      ServerName serverName = entry.getValue();
      TRegionInfo region = new TRegionInfo();
      region.serverName = ByteBuffer.wrap(
          Bytes.toBytes(serverName.getHostname()));
      region.port = serverName.getPort();
      region.startKey = ByteBuffer.wrap(info.getStartKey());
      region.endKey = ByteBuffer.wrap(info.getEndKey());
      region.id = info.getRegionId();
      region.name = ByteBuffer.wrap(info.getRegionName());
      region.version = info.getVersion();
      results.add(region);
    }
    return results;
  } catch (TableNotFoundException e) {
    // The table can also disappear between getTable() and the region fetch.
    return Collections.emptyList();
  } catch (IOException e){
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
767
768 @Deprecated
769 @Override
770 public List<TCell> get(
771 ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
772 Map<ByteBuffer, ByteBuffer> attributes)
773 throws IOError {
774 byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
775 if (famAndQf.length == 1) {
776 return get(tableName, row, famAndQf[0], null, attributes);
777 }
778 if (famAndQf.length == 2) {
779 return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
780 }
781 throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
782 }
783
784
785
786
787
788
789
790
/**
 * Fetches the latest cells for a single row: the whole family when
 * {@code qualifier} is null, otherwise the one family:qualifier column.
 *
 * @param attributes operation attributes to attach to the Get (may be null)
 * @throws IOError wrapping any IOException, with the full stack trace
 */
protected List<TCell> get(ByteBuffer tableName,
    ByteBuffer row,
    byte[] family,
    byte[] qualifier,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  HTableInterface table = null;
  try {
    table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    if (qualifier == null) {
      get.addFamily(family);
    } else {
      get.addColumn(family, qualifier);
    }
    Result result = table.get(get);
    return ThriftUtilities.cellFromHBase(result.rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    // Always release the table back to the connection cache.
    closeTable(table);
  }
}
815
816 @Deprecated
817 @Override
818 public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
819 int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
820 byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
821 if(famAndQf.length == 1) {
822 return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
823 }
824 if (famAndQf.length == 2) {
825 return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
826 }
827 throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
828
829 }
830
831
832
833
834
835
836
837
838
/**
 * Fetches up to {@code numVersions} versions of a row's family (qualifier
 * null) or of one family:qualifier column.
 *
 * @throws IOError wrapping any IOException, with the full stack trace
 */
public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, byte[] family,
    byte[] qualifier, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  HTableInterface table = null;
  try {
    table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    if (null == qualifier) {
      get.addFamily(family);
    } else {
      get.addColumn(family, qualifier);
    }
    get.setMaxVersions(numVersions);
    Result result = table.get(get);
    return ThriftUtilities.cellFromHBase(result.rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally{
    // Always release the table back to the connection cache.
    closeTable(table);
  }
}
861
862 @Deprecated
863 @Override
864 public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
865 long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
866 byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
867 if (famAndQf.length == 1) {
868 return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
869 }
870 if (famAndQf.length == 2) {
871 return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
872 attributes);
873 }
874 throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
875 }
876
877
878
879
880
881
882
883
/**
 * Fetches up to {@code numVersions} versions of a row's family/column with
 * cell timestamps strictly before {@code timestamp} (time range [0, ts)).
 *
 * @throws IOError wrapping any IOException, with the full stack trace
 */
protected List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, byte[] family,
    byte[] qualifier, long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  HTableInterface table = null;
  try {
    table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    if (null == qualifier) {
      get.addFamily(family);
    } else {
      get.addColumn(family, qualifier);
    }
    get.setTimeRange(0, timestamp);
    get.setMaxVersions(numVersions);
    Result result = table.get(get);
    return ThriftUtilities.cellFromHBase(result.rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally{
    // Always release the table back to the connection cache.
    closeTable(table);
  }
}
908
909 @Override
910 public List<TRowResult> getRow(ByteBuffer tableName, ByteBuffer row,
911 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
912 return getRowWithColumnsTs(tableName, row, null,
913 HConstants.LATEST_TIMESTAMP,
914 attributes);
915 }
916
917 @Override
918 public List<TRowResult> getRowWithColumns(ByteBuffer tableName,
919 ByteBuffer row,
920 List<ByteBuffer> columns,
921 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
922 return getRowWithColumnsTs(tableName, row, columns,
923 HConstants.LATEST_TIMESTAMP,
924 attributes);
925 }
926
927 @Override
928 public List<TRowResult> getRowTs(ByteBuffer tableName, ByteBuffer row,
929 long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
930 return getRowWithColumnsTs(tableName, row, null,
931 timestamp, attributes);
932 }
933
934 @Override
935 public List<TRowResult> getRowWithColumnsTs(
936 ByteBuffer tableName, ByteBuffer row, List<ByteBuffer> columns,
937 long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
938 HTableInterface table = null;
939 try {
940 table = getTable(tableName);
941 if (columns == null) {
942 Get get = new Get(getBytes(row));
943 addAttributes(get, attributes);
944 get.setTimeRange(0, timestamp);
945 Result result = table.get(get);
946 return ThriftUtilities.rowResultFromHBase(result);
947 }
948 Get get = new Get(getBytes(row));
949 addAttributes(get, attributes);
950 for(ByteBuffer column : columns) {
951 byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
952 if (famAndQf.length == 1) {
953 get.addFamily(famAndQf[0]);
954 } else {
955 get.addColumn(famAndQf[0], famAndQf[1]);
956 }
957 }
958 get.setTimeRange(0, timestamp);
959 Result result = table.get(get);
960 return ThriftUtilities.rowResultFromHBase(result);
961 } catch (IOException e) {
962 LOG.warn(e.getMessage(), e);
963 throw new IOError(Throwables.getStackTraceAsString(e));
964 } finally{
965 closeTable(table);
966 }
967 }
968
969 @Override
970 public List<TRowResult> getRows(ByteBuffer tableName,
971 List<ByteBuffer> rows,
972 Map<ByteBuffer, ByteBuffer> attributes)
973 throws IOError {
974 return getRowsWithColumnsTs(tableName, rows, null,
975 HConstants.LATEST_TIMESTAMP,
976 attributes);
977 }
978
979 @Override
980 public List<TRowResult> getRowsWithColumns(ByteBuffer tableName,
981 List<ByteBuffer> rows,
982 List<ByteBuffer> columns,
983 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
984 return getRowsWithColumnsTs(tableName, rows, columns,
985 HConstants.LATEST_TIMESTAMP,
986 attributes);
987 }
988
989 @Override
990 public List<TRowResult> getRowsTs(ByteBuffer tableName,
991 List<ByteBuffer> rows,
992 long timestamp,
993 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
994 return getRowsWithColumnsTs(tableName, rows, null,
995 timestamp, attributes);
996 }
997
/**
 * Batch variant: fetches, for each requested row, the given columns (null
 * selects all) with cell timestamps strictly before {@code timestamp}.
 * All Gets are issued in a single multi-get call.
 *
 * @throws IOError wrapping any IOException, with the full stack trace
 */
@Override
public List<TRowResult> getRowsWithColumnsTs(ByteBuffer tableName,
    List<ByteBuffer> rows,
    List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  HTableInterface table = null;
  try {
    List<Get> gets = new ArrayList<Get>(rows.size());
    table = getTable(tableName);
    if (metrics != null) {
      metrics.incNumRowKeysInBatchGet(rows.size());
    }
    for (ByteBuffer row : rows) {
      Get get = new Get(getBytes(row));
      addAttributes(get, attributes);
      if (columns != null) {
        // Each column is a bare family or "family:qualifier".
        for(ByteBuffer column : columns) {
          byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
          if (famAndQf.length == 1) {
            get.addFamily(famAndQf[0]);
          } else {
            get.addColumn(famAndQf[0], famAndQf[1]);
          }
        }
      }
      get.setTimeRange(0, timestamp);
      gets.add(get);
    }
    Result[] result = table.get(gets);
    return ThriftUtilities.rowResultFromHBase(result);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally{
    // Always release the table back to the connection cache.
    closeTable(table);
  }
}
1036
1037 @Override
1038 public void deleteAll(
1039 ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
1040 Map<ByteBuffer, ByteBuffer> attributes)
1041 throws IOError {
1042 deleteAllTs(tableName, row, column, HConstants.LATEST_TIMESTAMP,
1043 attributes);
1044 }
1045
1046 @Override
1047 public void deleteAllTs(ByteBuffer tableName,
1048 ByteBuffer row,
1049 ByteBuffer column,
1050 long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
1051 HTableInterface table = null;
1052 try {
1053 table = getTable(tableName);
1054 Delete delete = new Delete(getBytes(row));
1055 addAttributes(delete, attributes);
1056 byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
1057 if (famAndQf.length == 1) {
1058 delete.deleteFamily(famAndQf[0], timestamp);
1059 } else {
1060 delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
1061 }
1062 table.delete(delete);
1063
1064 } catch (IOException e) {
1065 LOG.warn(e.getMessage(), e);
1066 throw new IOError(Throwables.getStackTraceAsString(e));
1067 } finally {
1068 closeTable(table);
1069 }
1070 }
1071
1072 @Override
1073 public void deleteAllRow(
1074 ByteBuffer tableName, ByteBuffer row,
1075 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
1076 deleteAllRowTs(tableName, row, HConstants.LATEST_TIMESTAMP, attributes);
1077 }
1078
1079 @Override
1080 public void deleteAllRowTs(
1081 ByteBuffer tableName, ByteBuffer row, long timestamp,
1082 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
1083 HTableInterface table = null;
1084 try {
1085 table = getTable(tableName);
1086 Delete delete = new Delete(getBytes(row), timestamp);
1087 addAttributes(delete, attributes);
1088 table.delete(delete);
1089 } catch (IOException e) {
1090 LOG.warn(e.getMessage(), e);
1091 throw new IOError(Throwables.getStackTraceAsString(e));
1092 } finally {
1093 closeTable(table);
1094 }
1095 }
1096
1097 @Override
1098 public void createTable(ByteBuffer in_tableName,
1099 List<ColumnDescriptor> columnFamilies) throws IOError,
1100 IllegalArgument, AlreadyExists {
1101 byte [] tableName = getBytes(in_tableName);
1102 try {
1103 if (getHBaseAdmin().tableExists(tableName)) {
1104 throw new AlreadyExists("table name already in use");
1105 }
1106 HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
1107 for (ColumnDescriptor col : columnFamilies) {
1108 HColumnDescriptor colDesc = ThriftUtilities.colDescFromThrift(col);
1109 desc.addFamily(colDesc);
1110 }
1111 getHBaseAdmin().createTable(desc);
1112 } catch (IOException e) {
1113 LOG.warn(e.getMessage(), e);
1114 throw new IOError(e.getMessage());
1115 } catch (IllegalArgumentException e) {
1116 LOG.warn(e.getMessage(), e);
1117 throw new IllegalArgument(e.getMessage());
1118 }
1119 }
1120
1121 @Override
1122 public void deleteTable(ByteBuffer in_tableName) throws IOError {
1123 byte [] tableName = getBytes(in_tableName);
1124 if (LOG.isDebugEnabled()) {
1125 LOG.debug("deleteTable: table=" + Bytes.toString(tableName));
1126 }
1127 try {
1128 if (!getHBaseAdmin().tableExists(tableName)) {
1129 throw new IOException("table does not exist");
1130 }
1131 getHBaseAdmin().deleteTable(tableName);
1132 } catch (IOException e) {
1133 LOG.warn(e.getMessage(), e);
1134 throw new IOError(e.getMessage());
1135 }
1136 }
1137
1138 @Override
1139 public void mutateRow(ByteBuffer tableName, ByteBuffer row,
1140 List<Mutation> mutations, Map<ByteBuffer, ByteBuffer> attributes)
1141 throws IOError, IllegalArgument {
1142 mutateRowTs(tableName, row, mutations, HConstants.LATEST_TIMESTAMP,
1143 attributes);
1144 }
1145
1146 @Override
1147 public void mutateRowTs(ByteBuffer tableName, ByteBuffer row,
1148 List<Mutation> mutations, long timestamp,
1149 Map<ByteBuffer, ByteBuffer> attributes)
1150 throws IOError, IllegalArgument {
1151 HTableInterface table = null;
1152 try {
1153 table = getTable(tableName);
1154 Put put = new Put(getBytes(row), timestamp);
1155 addAttributes(put, attributes);
1156
1157 Delete delete = new Delete(getBytes(row));
1158 addAttributes(delete, attributes);
1159 if (metrics != null) {
1160 metrics.incNumRowKeysInBatchMutate(mutations.size());
1161 }
1162
1163
1164 for (Mutation m : mutations) {
1165 byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
1166 if (m.isDelete) {
1167 if (famAndQf.length == 1) {
1168 delete.deleteFamily(famAndQf[0], timestamp);
1169 } else {
1170 delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
1171 }
1172 delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
1173 : Durability.SKIP_WAL);
1174 } else {
1175 if(famAndQf.length == 1) {
1176 LOG.warn("No column qualifier specified. Delete is the only mutation supported "
1177 + "over the whole column family.");
1178 } else {
1179 put.addImmutable(famAndQf[0], famAndQf[1],
1180 m.value != null ? getBytes(m.value)
1181 : HConstants.EMPTY_BYTE_ARRAY);
1182 }
1183 put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
1184 }
1185 }
1186 if (!delete.isEmpty())
1187 table.delete(delete);
1188 if (!put.isEmpty())
1189 table.put(put);
1190 } catch (IOException e) {
1191 LOG.warn(e.getMessage(), e);
1192 throw new IOError(e.getMessage());
1193 } catch (IllegalArgumentException e) {
1194 LOG.warn(e.getMessage(), e);
1195 throw new IllegalArgument(Throwables.getStackTraceAsString(e));
1196 } finally{
1197 closeTable(table);
1198 }
1199 }
1200
1201 @Override
1202 public void mutateRows(ByteBuffer tableName, List<BatchMutation> rowBatches,
1203 Map<ByteBuffer, ByteBuffer> attributes)
1204 throws IOError, IllegalArgument, TException {
1205 mutateRowsTs(tableName, rowBatches, HConstants.LATEST_TIMESTAMP, attributes);
1206 }
1207
/**
 * Applies batches of mutations to multiple rows at the given timestamp.
 * For each row, deletes and puts accumulate into one Delete and one Put;
 * all Puts are shipped first, then all Deletes.
 *
 * @param rowBatches one entry per row, each carrying its own mutations
 * @param attributes operation attributes copied onto every Put and Delete
 * @throws IOError on a cluster failure (message only, no stack trace)
 * @throws IllegalArgument on an invalid argument during the table calls
 */
@Override
public void mutateRowsTs(
    ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, IllegalArgument, TException {
  List<Put> puts = new ArrayList<Put>();
  List<Delete> deletes = new ArrayList<Delete>();

  for (BatchMutation batch : rowBatches) {
    byte[] row = getBytes(batch.row);
    List<Mutation> mutations = batch.mutations;
    Delete delete = new Delete(row);
    addAttributes(delete, attributes);
    Put put = new Put(row, timestamp);
    addAttributes(put, attributes);
    for (Mutation m : mutations) {
      byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
      if (m.isDelete) {
        // Family-only spec deletes the whole family; otherwise one column.
        if (famAndQf.length == 1) {
          delete.deleteFamily(famAndQf[0], timestamp);
        } else {
          delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
        }
        delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
            : Durability.SKIP_WAL);
      } else {
        // NOTE(review): a put with a family-only column logs this warning
        // and then falls through to the length check below, which throws an
        // unchecked IllegalArgumentException (escaping the IOError/
        // IllegalArgument handlers, since it happens before the try block).
        // mutateRowTs only warns and skips — confirm the difference is
        // intended.
        if (famAndQf.length == 1) {
          LOG.warn("No column qualifier specified. Delete is the only mutation supported "
              + "over the whole column family.");
        }
        if (famAndQf.length == 2) {
          // Missing Thrift value is written as an empty byte array.
          put.addImmutable(famAndQf[0], famAndQf[1],
              m.value != null ? getBytes(m.value)
                  : HConstants.EMPTY_BYTE_ARRAY);
        } else {
          throw new IllegalArgumentException("Invalid famAndQf provided.");
        }
        put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
      }
    }
    // Only ship operations that actually accumulated cells.
    if (!delete.isEmpty())
      deletes.add(delete);
    if (!put.isEmpty())
      puts.add(put);
  }
  HTableInterface table = null;
  try {
    table = getTable(tableName);
    if (!puts.isEmpty())
      table.put(puts);
    if (!deletes.isEmpty())
      table.delete(deletes);

  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    // NOTE(review): IOError carries only the message here while the
    // IllegalArgument path below carries a full stack trace — inconsistent
    // with most sibling methods; confirm which form clients expect.
    throw new IOError(e.getMessage());
  } catch (IllegalArgumentException e) {
    LOG.warn(e.getMessage(), e);
    throw new IllegalArgument(Throwables.getStackTraceAsString(e));
  } finally{
    closeTable(table);
  }
}
1272
1273 @Deprecated
1274 @Override
1275 public long atomicIncrement(
1276 ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
1277 throws IOError, IllegalArgument, TException {
1278 byte [][] famAndQf = KeyValue.parseColumn(getBytes(column));
1279 if(famAndQf.length == 1) {
1280 return atomicIncrement(tableName, row, famAndQf[0], HConstants.EMPTY_BYTE_ARRAY, amount);
1281 }
1282 return atomicIncrement(tableName, row, famAndQf[0], famAndQf[1], amount);
1283 }
1284
1285 protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row,
1286 byte [] family, byte [] qualifier, long amount)
1287 throws IOError, IllegalArgument, TException {
1288 HTableInterface table = null;
1289 try {
1290 table = getTable(tableName);
1291 return table.incrementColumnValue(
1292 getBytes(row), family, qualifier, amount);
1293 } catch (IOException e) {
1294 LOG.warn(e.getMessage(), e);
1295 throw new IOError(Throwables.getStackTraceAsString(e));
1296 } finally {
1297 closeTable(table);
1298 }
1299 }
1300
1301 @Override
1302 public void scannerClose(int id) throws IOError, IllegalArgument {
1303 LOG.debug("scannerClose: id=" + id);
1304 ResultScannerWrapper resultScannerWrapper = getScanner(id);
1305 if (resultScannerWrapper == null) {
1306 String message = "scanner ID is invalid";
1307 LOG.warn(message);
1308 throw new IllegalArgument("scanner ID is invalid");
1309 }
1310 resultScannerWrapper.getScanner().close();
1311 removeScanner(id);
1312 }
1313
1314 @Override
1315 public List<TRowResult> scannerGetList(int id,int nbRows)
1316 throws IllegalArgument, IOError {
1317 LOG.debug("scannerGetList: id=" + id);
1318 ResultScannerWrapper resultScannerWrapper = getScanner(id);
1319 if (null == resultScannerWrapper) {
1320 String message = "scanner ID is invalid";
1321 LOG.warn(message);
1322 throw new IllegalArgument("scanner ID is invalid");
1323 }
1324
1325 Result [] results = null;
1326 try {
1327 results = resultScannerWrapper.getScanner().next(nbRows);
1328 if (null == results) {
1329 return new ArrayList<TRowResult>();
1330 }
1331 } catch (IOException e) {
1332 LOG.warn(e.getMessage(), e);
1333 throw new IOError(e.getMessage());
1334 }
1335 return ThriftUtilities.rowResultFromHBase(results, resultScannerWrapper.isColumnSorted());
1336 }
1337
1338 @Override
1339 public List<TRowResult> scannerGet(int id) throws IllegalArgument, IOError {
1340 return scannerGetList(id,1);
1341 }
1342
1343 @Override
1344 public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
1345 Map<ByteBuffer, ByteBuffer> attributes)
1346 throws IOError {
1347 HTableInterface table = null;
1348 try {
1349 table = getTable(tableName);
1350 Scan scan = new Scan();
1351 addAttributes(scan, attributes);
1352 if (tScan.isSetStartRow()) {
1353 scan.setStartRow(tScan.getStartRow());
1354 }
1355 if (tScan.isSetStopRow()) {
1356 scan.setStopRow(tScan.getStopRow());
1357 }
1358 if (tScan.isSetTimestamp()) {
1359 scan.setTimeRange(0, tScan.getTimestamp());
1360 }
1361 if (tScan.isSetCaching()) {
1362 scan.setCaching(tScan.getCaching());
1363 }
1364 if (tScan.isSetBatchSize()) {
1365 scan.setBatch(tScan.getBatchSize());
1366 }
1367 if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
1368 for(ByteBuffer column : tScan.getColumns()) {
1369 byte [][] famQf = KeyValue.parseColumn(getBytes(column));
1370 if(famQf.length == 1) {
1371 scan.addFamily(famQf[0]);
1372 } else {
1373 scan.addColumn(famQf[0], famQf[1]);
1374 }
1375 }
1376 }
1377 if (tScan.isSetFilterString()) {
1378 ParseFilter parseFilter = new ParseFilter();
1379 scan.setFilter(
1380 parseFilter.parseFilterString(tScan.getFilterString()));
1381 }
1382 if (tScan.isSetReversed()) {
1383 scan.setReversed(tScan.isReversed());
1384 }
1385 return addScanner(table.getScanner(scan), tScan.sortColumns);
1386 } catch (IOException e) {
1387 LOG.warn(e.getMessage(), e);
1388 throw new IOError(Throwables.getStackTraceAsString(e));
1389 } finally{
1390 closeTable(table);
1391 }
1392 }
1393
1394 @Override
1395 public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
1396 List<ByteBuffer> columns,
1397 Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
1398 HTableInterface table = null;
1399 try {
1400 table = getTable(tableName);
1401 Scan scan = new Scan(getBytes(startRow));
1402 addAttributes(scan, attributes);
1403 if(columns != null && columns.size() != 0) {
1404 for(ByteBuffer column : columns) {
1405 byte [][] famQf = KeyValue.parseColumn(getBytes(column));
1406 if(famQf.length == 1) {
1407 scan.addFamily(famQf[0]);
1408 } else {
1409 scan.addColumn(famQf[0], famQf[1]);
1410 }
1411 }
1412 }
1413 return addScanner(table.getScanner(scan), false);
1414 } catch (IOException e) {
1415 LOG.warn(e.getMessage(), e);
1416 throw new IOError(Throwables.getStackTraceAsString(e));
1417 } finally{
1418 closeTable(table);
1419 }
1420 }
1421
1422 @Override
1423 public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
1424 ByteBuffer stopRow, List<ByteBuffer> columns,
1425 Map<ByteBuffer, ByteBuffer> attributes)
1426 throws IOError, TException {
1427 HTableInterface table = null;
1428 try {
1429 table = getTable(tableName);
1430 Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
1431 addAttributes(scan, attributes);
1432 if(columns != null && columns.size() != 0) {
1433 for(ByteBuffer column : columns) {
1434 byte [][] famQf = KeyValue.parseColumn(getBytes(column));
1435 if(famQf.length == 1) {
1436 scan.addFamily(famQf[0]);
1437 } else {
1438 scan.addColumn(famQf[0], famQf[1]);
1439 }
1440 }
1441 }
1442 return addScanner(table.getScanner(scan), false);
1443 } catch (IOException e) {
1444 LOG.warn(e.getMessage(), e);
1445 throw new IOError(Throwables.getStackTraceAsString(e));
1446 } finally{
1447 closeTable(table);
1448 }
1449 }
1450
1451 @Override
1452 public int scannerOpenWithPrefix(ByteBuffer tableName,
1453 ByteBuffer startAndPrefix,
1454 List<ByteBuffer> columns,
1455 Map<ByteBuffer, ByteBuffer> attributes)
1456 throws IOError, TException {
1457 HTableInterface table = null;
1458 try {
1459 table = getTable(tableName);
1460 Scan scan = new Scan(getBytes(startAndPrefix));
1461 addAttributes(scan, attributes);
1462 Filter f = new WhileMatchFilter(
1463 new PrefixFilter(getBytes(startAndPrefix)));
1464 scan.setFilter(f);
1465 if (columns != null && columns.size() != 0) {
1466 for(ByteBuffer column : columns) {
1467 byte [][] famQf = KeyValue.parseColumn(getBytes(column));
1468 if(famQf.length == 1) {
1469 scan.addFamily(famQf[0]);
1470 } else {
1471 scan.addColumn(famQf[0], famQf[1]);
1472 }
1473 }
1474 }
1475 return addScanner(table.getScanner(scan), false);
1476 } catch (IOException e) {
1477 LOG.warn(e.getMessage(), e);
1478 throw new IOError(Throwables.getStackTraceAsString(e));
1479 } finally{
1480 closeTable(table);
1481 }
1482 }
1483
1484 @Override
1485 public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
1486 List<ByteBuffer> columns, long timestamp,
1487 Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
1488 HTableInterface table = null;
1489 try {
1490 table = getTable(tableName);
1491 Scan scan = new Scan(getBytes(startRow));
1492 addAttributes(scan, attributes);
1493 scan.setTimeRange(0, timestamp);
1494 if (columns != null && columns.size() != 0) {
1495 for (ByteBuffer column : columns) {
1496 byte [][] famQf = KeyValue.parseColumn(getBytes(column));
1497 if(famQf.length == 1) {
1498 scan.addFamily(famQf[0]);
1499 } else {
1500 scan.addColumn(famQf[0], famQf[1]);
1501 }
1502 }
1503 }
1504 return addScanner(table.getScanner(scan), false);
1505 } catch (IOException e) {
1506 LOG.warn(e.getMessage(), e);
1507 throw new IOError(Throwables.getStackTraceAsString(e));
1508 } finally{
1509 closeTable(table);
1510 }
1511 }
1512
1513 @Override
1514 public int scannerOpenWithStopTs(ByteBuffer tableName, ByteBuffer startRow,
1515 ByteBuffer stopRow, List<ByteBuffer> columns, long timestamp,
1516 Map<ByteBuffer, ByteBuffer> attributes)
1517 throws IOError, TException {
1518 HTableInterface table = null;
1519 try {
1520 table = getTable(tableName);
1521 Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
1522 addAttributes(scan, attributes);
1523 scan.setTimeRange(0, timestamp);
1524 if (columns != null && columns.size() != 0) {
1525 for (ByteBuffer column : columns) {
1526 byte [][] famQf = KeyValue.parseColumn(getBytes(column));
1527 if(famQf.length == 1) {
1528 scan.addFamily(famQf[0]);
1529 } else {
1530 scan.addColumn(famQf[0], famQf[1]);
1531 }
1532 }
1533 }
1534 scan.setTimeRange(0, timestamp);
1535 return addScanner(table.getScanner(scan), false);
1536 } catch (IOException e) {
1537 LOG.warn(e.getMessage(), e);
1538 throw new IOError(Throwables.getStackTraceAsString(e));
1539 } finally{
1540 closeTable(table);
1541 }
1542 }
1543
1544 @Override
1545 public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
1546 ByteBuffer tableName) throws IOError, TException {
1547 HTableInterface table = null;
1548 try {
1549 TreeMap<ByteBuffer, ColumnDescriptor> columns =
1550 new TreeMap<ByteBuffer, ColumnDescriptor>();
1551
1552 table = getTable(tableName);
1553 HTableDescriptor desc = table.getTableDescriptor();
1554
1555 for (HColumnDescriptor e : desc.getFamilies()) {
1556 ColumnDescriptor col = ThriftUtilities.colDescFromHbase(e);
1557 columns.put(col.name, col);
1558 }
1559 return columns;
1560 } catch (IOException e) {
1561 LOG.warn(e.getMessage(), e);
1562 throw new IOError(Throwables.getStackTraceAsString(e));
1563 } finally {
1564 closeTable(table);
1565 }
1566 }
1567
1568 @Override
1569 public List<TCell> getRowOrBefore(ByteBuffer tableName, ByteBuffer row,
1570 ByteBuffer family) throws IOError {
1571 HTableInterface table = null;
1572 try {
1573 table = getTable(getBytes(tableName));
1574 Result result = table.getRowOrBefore(getBytes(row), getBytes(family));
1575 return ThriftUtilities.cellFromHBase(result.rawCells());
1576 } catch (IOException e) {
1577 LOG.warn(e.getMessage(), e);
1578 throw new IOError(e.getMessage());
1579 } finally {
1580 closeTable(table);
1581 }
1582 }
1583
/**
 * Looks up the region containing {@code searchRow} by reading the catalog
 * (hbase:meta) row at or before the search key.
 *
 * @return region boundaries, name, id, version and (when known) the
 *         hosting server's name and port
 * @throws IOError if the meta row or its HRegionInfo cannot be found
 */
@Override
public TRegionInfo getRegionInfo(ByteBuffer searchRow) throws IOError {
  HTableInterface table = null;
  try {
    // The lookup is served from the catalog (meta) table itself.
    table = getTable(TableName.META_TABLE_NAME.getName());
    byte[] row = getBytes(searchRow);
    Result startRowResult = table.getRowOrBefore(
        row, HConstants.CATALOG_FAMILY);

    if (startRowResult == null) {
      throw new IOException("Cannot find row in "+ TableName.META_TABLE_NAME+", row="
          + Bytes.toStringBinary(row));
    }

    // Decode the serialized HRegionInfo stored in the catalog row.
    HRegionInfo regionInfo = HRegionInfo.getHRegionInfo(startRowResult);
    if (regionInfo == null) {
      throw new IOException("HRegionInfo REGIONINFO was null or " +
          " empty in Meta for row="
          + Bytes.toStringBinary(row));
    }
    TRegionInfo region = new TRegionInfo();
    region.setStartKey(regionInfo.getStartKey());
    region.setEndKey(regionInfo.getEndKey());
    region.id = regionInfo.getRegionId();
    region.setName(regionInfo.getRegionName());
    region.version = regionInfo.getVersion();

    // The server location may be absent from the catalog row (e.g. while
    // the region is unassigned); the Thrift fields stay unset in that case.
    ServerName serverName = HRegionInfo.getServerName(startRowResult);
    if (serverName != null) {
      region.setServerName(Bytes.toBytes(serverName.getHostname()));
      region.port = serverName.getPort();
    }
    return region;
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(Throwables.getStackTraceAsString(e));
  } finally {
    closeTable(table);
  }
}
1626
1627 private void closeTable(HTableInterface table) throws IOError
1628 {
1629 try{
1630 if(table != null){
1631 table.close();
1632 }
1633 } catch (IOException e){
1634 LOG.error(e.getMessage(), e);
1635 throw new IOError(Throwables.getStackTraceAsString(e));
1636 }
1637 }
1638
// Installs the metrics sink used by the batch get/mutate paths. A null
// argument disables metric updates (callers guard on `metrics != null`).
private void initMetrics(ThriftMetrics metrics) {
  this.metrics = metrics;
}
1642
1643 @Override
1644 public void increment(TIncrement tincrement) throws IOError, TException {
1645
1646 if (tincrement.getRow().length == 0 || tincrement.getTable().length == 0) {
1647 throw new TException("Must supply a table and a row key; can't increment");
1648 }
1649
1650 if (conf.getBoolean(COALESCE_INC_KEY, false)) {
1651 this.coalescer.queueIncrement(tincrement);
1652 return;
1653 }
1654
1655 HTableInterface table = null;
1656 try {
1657 table = getTable(tincrement.getTable());
1658 Increment inc = ThriftUtilities.incrementFromThrift(tincrement);
1659 table.increment(inc);
1660 } catch (IOException e) {
1661 LOG.warn(e.getMessage(), e);
1662 throw new IOError(Throwables.getStackTraceAsString(e));
1663 } finally{
1664 closeTable(table);
1665 }
1666 }
1667
1668 @Override
1669 public void incrementRows(List<TIncrement> tincrements) throws IOError, TException {
1670 if (conf.getBoolean(COALESCE_INC_KEY, false)) {
1671 this.coalescer.queueIncrements(tincrements);
1672 return;
1673 }
1674 for (TIncrement tinc : tincrements) {
1675 increment(tinc);
1676 }
1677 }
1678 }
1679
1680
1681
1682
1683 private static void addAttributes(OperationWithAttributes op,
1684 Map<ByteBuffer, ByteBuffer> attributes) {
1685 if (attributes == null || attributes.size() == 0) {
1686 return;
1687 }
1688 for (Map.Entry<ByteBuffer, ByteBuffer> entry : attributes.entrySet()) {
1689 String name = Bytes.toStringBinary(getBytes(entry.getKey()));
1690 byte[] value = getBytes(entry.getValue());
1691 op.setAttribute(name, value);
1692 }
1693 }
1694
1695 public static void registerFilters(Configuration conf) {
1696 String[] filters = conf.getStrings("hbase.thrift.filters");
1697 if(filters != null) {
1698 for(String filterClass: filters) {
1699 String[] filterPart = filterClass.split(":");
1700 if(filterPart.length != 2) {
1701 LOG.warn("Invalid filter specification " + filterClass + " - skipping");
1702 } else {
1703 ParseFilter.registerFilter(filterPart[0], filterPart[1]);
1704 }
1705 }
1706 }
1707 }
1708 }