/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.master;

import static org.junit.Assert.fail;

import java.io.IOException;
import java.net.SocketTimeoutException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.ipc.RpcClient;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ServiceException;

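/**
 * Verifies that an RPC issued against a master that has been constructed but not
 * started fails with ServerNotRunningYetException rather than some other error.
 */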
@Category(MediumTests.class)
public class TestHMasterRPCException {

  @Test
  public void testRPCException() throws Exception {
    HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
    TEST_UTIL.startMiniZKCluster();
    Configuration conf = TEST_UTIL.getConfiguration();
    conf.set(HConstants.MASTER_PORT, "0");
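    // Create the master but do not start it; calls against its RPC endpoint
    // should be rejected with ServerNotRunningYetException until it is running.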
    HMaster hm = new HMaster(conf);
    ServerName sm = hm.getServerName();
    RpcClient rpcClient = new RpcClient(conf, HConstants.CLUSTER_ID_DEFAULT);
    try {
      int i = 0;
      // retry the RPC a few times; we have seen SocketTimeoutExceptions if we
      // try to connect too soon. Retry on SocketTimeoutException.
      while (i < 20) {
        try {
          BlockingRpcChannel channel =
            rpcClient.createBlockingRpcChannel(sm, User.getCurrent(), 0);
          MasterProtos.MasterService.BlockingInterface stub =
            MasterProtos.MasterService.newBlockingStub(channel);
          stub.isMasterRunning(null, IsMasterRunningRequest.getDefaultInstance());
          fail();
        } catch (ServiceException ex) {
          IOException ie = ProtobufUtil.getRemoteException(ex);
          if (!(ie instanceof SocketTimeoutException)) {
            if (ie.getMessage().startsWith("org.apache.hadoop.hbase.ipc." +
                "ServerNotRunningYetException: Server is not running yet")) {
              // Done.  Got the exception we wanted.
              System.out.println("Expected exception: " + ie.getMessage());
              return;
            } else {
              throw ex;
            }
          } else {
            System.err.println("Got SocketTimeoutException. Will retry.");
          }
        } catch (Throwable t) {
          fail("Unexpected throwable: " + t);
        }
        Thread.sleep(100);
        i++;
      }
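      // Retries exhausted without ever seeing the expected ServerNotRunningYetException.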
      fail();
    } finally {
      rpcClient.stop();
      // Also tear down the mini ZK cluster started at the beginning of the test.
      TEST_UTIL.shutdownMiniZKCluster();
    }
  }
}