/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.util.Map.Entry;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.util.VersionInfo;

/**
 * Adds HBase configuration files to a Configuration.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class HBaseConfiguration extends Configuration {

  private static final Log LOG = LogFactory.getLog(HBaseConfiguration.class);

  private static final int CONVERT_TO_PERCENTAGE = 100;

  /**
   * Instantiating HBaseConfiguration() is deprecated. Please use
   * HBaseConfiguration#create() to construct a plain Configuration.
   */
  @Deprecated
  public HBaseConfiguration() {
    super();
    addHbaseResources(this);
    LOG.warn("instantiating HBaseConfiguration() is deprecated. Please use"
        + " HBaseConfiguration#create() to construct a plain Configuration");
  }

  /**
   * Instantiating HBaseConfiguration(Configuration) is deprecated. Please use
   * HBaseConfiguration#create(Configuration) to construct a plain
   * Configuration that includes the supplied settings.
   */
  @Deprecated
  public HBaseConfiguration(final Configuration c) {
    this();
    merge(this, c);
  }

  private static void checkDefaultsVersion(Configuration conf) {
    if (conf.getBoolean("hbase.defaults.for.version.skip", Boolean.FALSE)) return;
    String defaultsVersion = conf.get("hbase.defaults.for.version");
    String thisVersion = VersionInfo.getVersion();
    if (!thisVersion.equals(defaultsVersion)) {
      throw new RuntimeException(
        "hbase-default.xml file seems to be for an old version of HBase (" +
        defaultsVersion + "), this version is " + thisVersion);
    }
  }
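  // The version check above can be bypassed by setting
  // hbase.defaults.for.version.skip to true (e.g. in hbase-site.xml); this can
  // be useful when a differently versioned hbase-default.xml ends up on the
  // classpath, such as in some test or IDE setups.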

  private static void checkForClusterFreeMemoryLimit(Configuration conf) {
    float globalMemstoreLimit =
        conf.getFloat("hbase.regionserver.global.memstore.upperLimit", 0.4f);
    int gml = (int)(globalMemstoreLimit * CONVERT_TO_PERCENTAGE);
    float blockCacheUpperLimit =
        conf.getFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY,
            HConstants.HFILE_BLOCK_CACHE_SIZE_DEFAULT);
    int bcul = (int)(blockCacheUpperLimit * CONVERT_TO_PERCENTAGE);
    // The MemStore and BlockCache shares of the heap, expressed as whole
    // percentages, must leave enough free memory for the cluster to operate.
    if (CONVERT_TO_PERCENTAGE - (gml + bcul)
        < (int)(CONVERT_TO_PERCENTAGE *
            HConstants.HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD)) {
      throw new RuntimeException(
        "Current heap configuration for MemStore and BlockCache exceeds " +
        "the threshold required for successful cluster operation. " +
        "The combined value cannot exceed 0.8. Please check " +
        "the settings for hbase.regionserver.global.memstore.upperLimit and " +
        "hfile.block.cache.size in your configuration. " +
        "hbase.regionserver.global.memstore.upperLimit is " +
        globalMemstoreLimit +
        " hfile.block.cache.size is " + blockCacheUpperLimit);
    }
  }
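  // Worked example of the check above: with
  // hbase.regionserver.global.memstore.upperLimit = 0.4 (the default used
  // here) and hfile.block.cache.size = 0.4 (illustrative), gml + bcul = 40 + 40,
  // leaving 100 - 80 = 20% of the heap free, which just satisfies the limit
  // implied by the error message; any pair of settings summing to more than
  // 0.8 of the heap makes the method throw.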

  public static Configuration addHbaseResources(Configuration conf) {
    // hbase-default.xml is added first so that anything in hbase-site.xml
    // overrides it.
    conf.addResource("hbase-default.xml");
    conf.addResource("hbase-site.xml");

    checkDefaultsVersion(conf);
    checkForClusterFreeMemoryLimit(conf);
    return conf;
  }

  /**
   * Creates a Configuration with HBase resources.
   * @return a Configuration with the hbase-default.xml and hbase-site.xml
   * resources added
   */
  public static Configuration create() {
    Configuration conf = new Configuration();
    return addHbaseResources(conf);
  }

  /**
   * Creates a Configuration with HBase resources, then merges in the supplied
   * configuration so that its settings take precedence.
   * @param that Configuration to merge in
   * @return a Configuration created with the hbase-*.xml files plus the given
   * configuration
   */
  public static Configuration create(final Configuration that) {
    Configuration conf = create();
    merge(conf, that);
    return conf;
  }
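  // Typical client usage (a sketch; the quorum value is illustrative and not
  // something this class defines):
  //   Configuration conf = HBaseConfiguration.create();
  //   conf.set("hbase.zookeeper.quorum", "zk1.example.com,zk2.example.com");
  //   // ...then pass conf to an HTable, HBaseAdmin, etc.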

  /**
   * Merges two configurations.
   * @param destConf the configuration that will be overwritten with items from
   * srcConf
   * @param srcConf the source configuration
   */
  public static void merge(Configuration destConf, Configuration srcConf) {
    for (Entry<String, String> e : srcConf) {
      destConf.set(e.getKey(), e.getValue());
    }
  }
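  // Example of the merge semantics: merge(dest, src) copies every key/value
  // pair from src into dest, so a key present in both configurations ends up
  // with the value from src, while keys only in dest are left untouched.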

  /**
   * Returns whether Hadoop's ConfServlet is available on the classpath, i.e.
   * whether the running configuration can be exposed through the web UI's
   * configuration servlet.
   */
  public static boolean isShowConfInServlet() {
    boolean isShowConf = false;
    try {
      if (Class.forName("org.apache.hadoop.conf.ConfServlet") != null) {
        isShowConf = true;
      }
    } catch (Exception e) {
      // ConfServlet is not on the classpath; leave isShowConf as false.
    }
    return isShowConf;
  }

  /**
   * For debugging.  Writes the effective HBase configuration, as XML, to
   * standard out.
   * @param args ignored
   * @throws Exception if the configuration cannot be written
   */
  public static void main(String[] args) throws Exception {
    HBaseConfiguration.create().writeXml(System.out);
  }
}
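// The main() method above can be used to dump the effective configuration at
// the command line, e.g. (invocation is illustrative):
//   hbase org.apache.hadoop.hbase.HBaseConfiguration > effective-hbase-conf.xml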