/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.io.hfile;
19
20 import java.io.IOException;
21 import java.nio.ByteBuffer;
22 import java.util.NavigableMap;
23 import java.util.NavigableSet;
24 import java.util.concurrent.ConcurrentSkipListMap;
25 import java.util.concurrent.ConcurrentSkipListSet;
26
27 import org.apache.commons.logging.Log;
28 import org.apache.commons.logging.LogFactory;
29 import org.apache.hadoop.hbase.classification.InterfaceAudience;
30 import org.apache.hadoop.conf.Configuration;
31 import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram;
32 import org.apache.hadoop.hbase.util.Bytes;
33 import org.apache.hadoop.hbase.util.GsonUtil;
34
35
36 import org.apache.hbase.thirdparty.com.google.gson.Gson;
37 import org.apache.hbase.thirdparty.com.google.gson.TypeAdapter;
38 import org.apache.hbase.thirdparty.com.google.gson.stream.JsonReader;
39 import org.apache.hbase.thirdparty.com.google.gson.stream.JsonWriter;
40
41
42
43
44
45 @InterfaceAudience.Private
46 public class BlockCacheUtil {
47
48 private static final Log LOG = LogFactory.getLog(BlockCacheUtil.class);
49
50 public static final long NANOS_PER_SECOND = 1000000000;
51
52
53
54
55 private static final Gson GSON = GsonUtil.createGson()
56 .registerTypeAdapter(FastLongHistogram.class, new TypeAdapter<FastLongHistogram>() {
57
58 @Override
59 public void write(JsonWriter out, FastLongHistogram value) throws IOException {
60 AgeSnapshot snapshot = new AgeSnapshot(value);
61 out.beginObject();
62 out.name("mean").value(snapshot.getMean());
63 out.name("min").value(snapshot.getMin());
64 out.name("max").value(snapshot.getMax());
65 out.name("75thPercentile").value(snapshot.get75thPercentile());
66 out.name("95thPercentile").value(snapshot.get95thPercentile());
67 out.name("98thPercentile").value(snapshot.get98thPercentile());
68 out.name("99thPercentile").value(snapshot.get99thPercentile());
69 out.name("999thPercentile").value(snapshot.get999thPercentile());
70 out.endObject();
71 }
72
73 @Override
74 public FastLongHistogram read(JsonReader in) throws IOException {
75 throw new UnsupportedOperationException();
76 }
77 }).setPrettyPrinting().create();
78
79
80
81
82
83 public static String toString(final CachedBlock cb, final long now) {
84 return "filename=" + cb.getFilename() + ", " + toStringMinusFileName(cb, now);
85 }
86
87
88
89
90
91 static class CachedBlockCountsPerFile {
92 private int count = 0;
93 private long size = 0;
94 private int countData = 0;
95 private long sizeData = 0;
96 private final String filename;
97
98 CachedBlockCountsPerFile(final String filename) {
99 this.filename = filename;
100 }
101
102 public int getCount() {
103 return count;
104 }
105
106 public long getSize() {
107 return size;
108 }
109
110 public int getCountData() {
111 return countData;
112 }
113
114 public long getSizeData() {
115 return sizeData;
116 }
117
118 public String getFilename() {
119 return filename;
120 }
121 }
122
123
124
125
126 public static String toJSON(final String filename, final NavigableSet<CachedBlock> blocks)
127 throws IOException {
128 CachedBlockCountsPerFile counts = new CachedBlockCountsPerFile(filename);
129 for (CachedBlock cb: blocks) {
130 counts.count++;
131 counts.size += cb.getSize();
132 BlockType bt = cb.getBlockType();
133 if (bt != null && bt.isData()) {
134 counts.countData++;
135 counts.sizeData += cb.getSize();
136 }
137 }
138 return GSON.toJson(counts);
139 }
140
141
142
143
144 public static String toJSON(final CachedBlocksByFile cbsbf) throws IOException {
145 return GSON.toJson(cbsbf);
146 }
147
148
149
150
151 public static String toJSON(final BlockCache bc) throws IOException {
152 return GSON.toJson(bc);
153 }
154
155
156
157
158
159 public static String toStringMinusFileName(final CachedBlock cb, final long now) {
160 return "offset=" + cb.getOffset() +
161 ", size=" + cb.getSize() +
162 ", age=" + (now - cb.getCachedTime()) +
163 ", type=" + cb.getBlockType() +
164 ", priority=" + cb.getBlockPriority();
165 }
166
167
168
169
170
171
172
173
174 public static CachedBlocksByFile getLoadedCachedBlocksByFile(final Configuration conf,
175 final BlockCache bc) {
176 CachedBlocksByFile cbsbf = new CachedBlocksByFile(conf);
177 for (CachedBlock cb: bc) {
178 if (cbsbf.update(cb)) break;
179 }
180 return cbsbf;
181 }
182
183 private static int compareCacheBlock(Cacheable left, Cacheable right,
184 boolean includeNextBlockOnDiskSize) {
185 ByteBuffer l = ByteBuffer.allocate(left.getSerializedLength());
186 left.serialize(l, includeNextBlockOnDiskSize);
187 ByteBuffer r = ByteBuffer.allocate(right.getSerializedLength());
188 right.serialize(r, includeNextBlockOnDiskSize);
189 return Bytes.compareTo(l.array(), l.arrayOffset(), l.limit(),
190 r.array(), r.arrayOffset(), r.limit());
191 }
192
193
194
195
196
197
198
199
200
201
202 public static int validateBlockAddition(Cacheable existing, Cacheable newBlock,
203 BlockCacheKey cacheKey) {
204 int comparison = compareCacheBlock(existing, newBlock, false);
205 if (comparison != 0) {
206 throw new RuntimeException(
207 "Cached block contents differ, which should not have happened." + "cacheKey:" + cacheKey);
208 }
209 if ((existing instanceof HFileBlock) && (newBlock instanceof HFileBlock)) {
210 comparison = ((HFileBlock) existing).getNextBlockOnDiskSize()
211 - ((HFileBlock) newBlock).getNextBlockOnDiskSize();
212 }
213 return comparison;
214 }
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230 public static boolean shouldReplaceExistingCacheBlock(BlockCache blockCache,
231 BlockCacheKey cacheKey, Cacheable newBlock) {
232 Cacheable existingBlock = blockCache.getBlock(cacheKey, false, false, false);
233 if (existingBlock == null) {
234 return true;
235 }
236 int comparison = BlockCacheUtil.validateBlockAddition(existingBlock, newBlock, cacheKey);
237 if (comparison < 0) {
238 LOG.warn("Cached block contents differ by nextBlockOnDiskSize, the new block has "
239 + "nextBlockOnDiskSize set. Caching new block.");
240 return true;
241 } else if (comparison > 0) {
242 LOG.warn("Cached block contents differ by nextBlockOnDiskSize, the existing block has "
243 + "nextBlockOnDiskSize set, Keeping cached block.");
244 return false;
245 } else {
246 LOG.warn("Caching an already cached block: " + cacheKey
247 + ". This is harmless and can happen in rare " + "cases (see HBASE-8547)");
248 return false;
249 }
250 }
251
252
253
254
255
256
257 public static class CachedBlocksByFile {
258 private int count;
259 private int dataBlockCount;
260 private long size;
261 private long dataSize;
262 private final long now = System.nanoTime();
263
264
265
266
267
268
269
270 private final int max;
271 public static final int DEFAULT_MAX = 1000000;
272
273 CachedBlocksByFile() {
274 this(null);
275 }
276
277 CachedBlocksByFile(final Configuration c) {
278 this.max = c == null? DEFAULT_MAX: c.getInt("hbase.ui.blockcache.by.file.max", DEFAULT_MAX);
279 }
280
281
282
283
284 private transient NavigableMap<String, NavigableSet<CachedBlock>> cachedBlockByFile =
285 new ConcurrentSkipListMap<String, NavigableSet<CachedBlock>>();
286 FastLongHistogram hist = new FastLongHistogram();
287
288
289
290
291
292 public boolean update(final CachedBlock cb) {
293 if (isFull()) return true;
294 NavigableSet<CachedBlock> set = this.cachedBlockByFile.get(cb.getFilename());
295 if (set == null) {
296 set = new ConcurrentSkipListSet<CachedBlock>();
297 this.cachedBlockByFile.put(cb.getFilename(), set);
298 }
299 set.add(cb);
300 this.size += cb.getSize();
301 this.count++;
302 BlockType bt = cb.getBlockType();
303 if (bt != null && bt.isData()) {
304 this.dataBlockCount++;
305 this.dataSize += cb.getSize();
306 }
307 long age = (this.now - cb.getCachedTime())/NANOS_PER_SECOND;
308 this.hist.add(age, 1);
309 return false;
310 }
311
312
313
314
315
316
317 public boolean isFull() {
318 return this.count >= this.max;
319 }
320
321 public NavigableMap<String, NavigableSet<CachedBlock>> getCachedBlockStatsByFile() {
322 return this.cachedBlockByFile;
323 }
324
325
326
327
328 public int getCount() {
329 return count;
330 }
331
332 public int getDataCount() {
333 return dataBlockCount;
334 }
335
336
337
338
339 public long getSize() {
340 return size;
341 }
342
343
344
345
346 public long getDataSize() {
347 return dataSize;
348 }
349
350 public AgeSnapshot getAgeInCacheSnapshot() {
351 return new AgeSnapshot(this.hist);
352 }
353
354 @Override
355 public String toString() {
356 AgeSnapshot snapshot = getAgeInCacheSnapshot();
357 return "count=" + count + ", dataBlockCount=" + dataBlockCount + ", size=" + size +
358 ", dataSize=" + getDataSize() +
359 ", mean age=" + snapshot.getMean() +
360 ", min age=" + snapshot.getMin() +
361 ", max age=" + snapshot.getMax() +
362 ", 75th percentile age=" + snapshot.get75thPercentile() +
363 ", 95th percentile age=" + snapshot.get95thPercentile() +
364 ", 98th percentile age=" + snapshot.get98thPercentile() +
365 ", 99th percentile age=" + snapshot.get99thPercentile() +
366 ", 99.9th percentile age=" + snapshot.get99thPercentile();
367 }
368 }
369 }