/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.regionserver.throttle;
19
20 import org.apache.commons.logging.Log;
21 import org.apache.commons.logging.LogFactory;
22 import org.apache.hadoop.conf.Configuration;
23 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
24 import org.apache.hadoop.hbase.classification.InterfaceAudience;
25 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
26 import org.apache.hadoop.util.ReflectionUtils;
27
28 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
29 public final class FlushThroughputControllerFactory {
30
31 private static final Log LOG = LogFactory.getLog(FlushThroughputControllerFactory.class);
32
33 public static final String HBASE_FLUSH_THROUGHPUT_CONTROLLER_KEY =
34 "hbase.regionserver.flush.throughput.controller";
35
36 private static final Class<? extends ThroughputController>
37 DEFAULT_FLUSH_THROUGHPUT_CONTROLLER_CLASS = NoLimitThroughputController.class;
38
39 private FlushThroughputControllerFactory() {
40 }
41
42 public static ThroughputController create(RegionServerServices server,
43 Configuration conf) {
44 Class<? extends ThroughputController> clazz = getThroughputControllerClass(conf);
45 ThroughputController controller = ReflectionUtils.newInstance(clazz, conf);
46 controller.setup(server);
47 return controller;
48 }
49
50 public static Class<? extends ThroughputController> getThroughputControllerClass(
51 Configuration conf) {
52 String className =
53 conf.get(HBASE_FLUSH_THROUGHPUT_CONTROLLER_KEY,
54 DEFAULT_FLUSH_THROUGHPUT_CONTROLLER_CLASS.getName());
55 try {
56 return Class.forName(className).asSubclass(ThroughputController.class);
57 } catch (Exception e) {
58 LOG.warn(
59 "Unable to load configured flush throughput controller '" + className
60 + "', load default throughput controller "
61 + DEFAULT_FLUSH_THROUGHPUT_CONTROLLER_CLASS.getName() + " instead", e);
62 return DEFAULT_FLUSH_THROUGHPUT_CONTROLLER_CLASS;
63 }
64 }
65 }