View Javadoc

1   /**
2    * Licensed to the Apache Software Foundation (ASF) under one
3    * or more contributor license agreements.  See the NOTICE file
4    * distributed with this work for additional information
5    * regarding copyright ownership.  The ASF licenses this file
6    * to you under the Apache License, Version 2.0 (the
7    * "License"); you may not use this file except in compliance
8    * with the License.  You may obtain a copy of the License at
9    *
10   *     http://www.apache.org/licenses/LICENSE-2.0
11   *
12   * Unless required by applicable law or agreed to in writing, software
13   * distributed under the License is distributed on an "AS IS" BASIS,
14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15   * See the License for the specific language governing permissions and
16   * limitations under the License.
17   */
18  package org.apache.hadoop.hbase.ipc;
19  
20  import com.google.protobuf.BlockingRpcChannel;
21  import com.google.protobuf.RpcChannel;
22  
23  import java.io.Closeable;
24  
25  import org.apache.hadoop.hbase.ServerName;
26  import org.apache.hadoop.hbase.classification.InterfaceAudience;
27  import org.apache.hadoop.hbase.security.User;
28  
/**
 * Interface for RpcClient implementations so ConnectionManager can handle it.
 */
@InterfaceAudience.Private
public interface RpcClient extends Closeable {

  /** Configuration key controlling expiry of entries in the client's failed-servers list. */
  String FAILED_SERVER_EXPIRY_KEY = "hbase.ipc.client.failed.servers.expiry";

  /** Default for {@link #FAILED_SERVER_EXPIRY_KEY}: 2000 (presumably milliseconds — confirm). */
  int FAILED_SERVER_EXPIRY_DEFAULT = 2000;

  /** Configuration key for the minimum idle time before a client connection is closed. */
  String IDLE_TIME = "hbase.ipc.client.connection.minIdleTimeBeforeClose";

  /** Configuration key: whether the client may fall back to simple (insecure) authentication. */
  String IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY =
      "hbase.ipc.client.fallback-to-simple-auth-allowed";

  /** Default for {@link #IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY}: fallback disabled. */
  boolean IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT = false;

  /** Configuration key: use a dedicated thread for writing (per the key name — see impls). */
  String SPECIFIC_WRITE_THREAD = "hbase.ipc.client.specificThreadForWriting";

  /** Configuration key naming the default RPC codec class for the client. */
  String DEFAULT_CODEC_CLASS = "hbase.client.default.rpc.codec";

  /** Configuration key for the socket connect timeout. */
  String SOCKET_TIMEOUT_CONNECT = "hbase.ipc.client.socket.timeout.connect";

  /**
   * Configuration key for the socket read timeout: how long we wait when we wait for an answer.
   * It's not the operation time, it's the time we wait when we start to receive an answer, when
   * the remote write starts to send the data.
   */
  String SOCKET_TIMEOUT_READ = "hbase.ipc.client.socket.timeout.read";

  /** Configuration key for the socket write timeout. */
  String SOCKET_TIMEOUT_WRITE = "hbase.ipc.client.socket.timeout.write";

  /** Default for {@link #SOCKET_TIMEOUT_CONNECT}. */
  int DEFAULT_SOCKET_TIMEOUT_CONNECT = 10000; // 10 seconds

  /** Default for {@link #SOCKET_TIMEOUT_READ}. */
  int DEFAULT_SOCKET_TIMEOUT_READ = 20000; // 20 seconds

  /** Default for {@link #SOCKET_TIMEOUT_WRITE}. */
  int DEFAULT_SOCKET_TIMEOUT_WRITE = 60000; // 60 seconds

  // Used by the server, for compatibility with old clients.
  // The client in 0.99+ does not ping the server.
  int PING_CALL_ID = -1;

  /**
   * Creates a "channel" that can be used by a blocking protobuf service.  Useful setting up
   * protobuf blocking stubs.
   *
   * @param sn server name describing location of server
   * @param user the user which is to use the connection
   * @param rpcTimeout default rpc operation timeout
   *
   * @return A blocking rpc channel that goes via this rpc client instance.
   */
  BlockingRpcChannel createBlockingRpcChannel(ServerName sn, User user, int rpcTimeout);

  /**
   * Creates a "channel" that can be used by a protobuf service.  Useful setting up
   * protobuf stubs.
   *
   * @param sn server name describing location of server
   * @param user the user which is to use the connection
   * @param rpcTimeout default rpc operation timeout
   *
   * @return A rpc channel that goes via this rpc client instance.
   */
  RpcChannel createRpcChannel(final ServerName sn, final User user, int rpcTimeout);

  /**
   * Interrupt the connections to the given server. This should be called if the server
   * is known as actually dead. This will not prevent current operations from being retried, and,
   * depending on their own behavior, they may retry on the same server. This can be a feature,
   * for example at startup. In any case, they're likely to get connection refused (if the
   * process died) or no route to host: i.e. their next retries should be faster and with a
   * safe exception.
   *
   * @param sn server location to cancel connections of
   */
  void cancelConnections(ServerName sn);

  /**
   * Stop all threads related to this client.  No further calls may be made
   * using this client.
   */
  @Override
  void close();

  /**
   * @return true when this client uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
   *         supports cell blocks.
   */
  boolean hasCellBlockSupport();
}