/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.master.cleaner;
19
20 import static org.junit.Assert.assertEquals;
21 import static org.junit.Assert.assertFalse;
22 import static org.junit.Assert.assertTrue;
23
24 import java.io.IOException;
25 import java.util.List;
26 import java.util.Random;
27
28 import org.apache.commons.logging.Log;
29 import org.apache.commons.logging.LogFactory;
30 import org.apache.hadoop.conf.Configuration;
31 import org.apache.hadoop.fs.FSDataOutputStream;
32 import org.apache.hadoop.fs.FileStatus;
33 import org.apache.hadoop.fs.FileSystem;
34 import org.apache.hadoop.fs.Path;
35 import org.apache.hadoop.hbase.ChoreService;
36 import org.apache.hadoop.hbase.CoordinatedStateManager;
37 import org.apache.hadoop.hbase.HBaseTestingUtility;
38 import org.apache.hadoop.hbase.HConstants;
39 import org.apache.hadoop.hbase.Server;
40 import org.apache.hadoop.hbase.ServerName;
41 import org.apache.hadoop.hbase.client.ClusterConnection;
42 import org.apache.hadoop.hbase.testclassification.MediumTests;
43 import org.apache.hadoop.hbase.util.EnvironmentEdge;
44 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
45 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
46 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
47 import org.junit.AfterClass;
48 import org.junit.Assert;
49 import org.junit.BeforeClass;
50 import org.junit.Test;
51 import org.junit.experimental.categories.Category;
52
53 @Category(MediumTests.class)
54 public class TestHFileCleaner {
55 private static final Log LOG = LogFactory.getLog(TestHFileCleaner.class);
56
57 private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
58
59 private static DirScanPool POOL;
60
61 @BeforeClass
62 public static void setupCluster() throws Exception {
63
64 UTIL.startMiniDFSCluster(1);
65 POOL = new DirScanPool(UTIL.getConfiguration());
66 }
67
68 @AfterClass
69 public static void shutdownCluster() throws IOException {
70 UTIL.shutdownMiniDFSCluster();
71 POOL.shutdownNow();
72 }
73
74 @Test
75 public void testTTLCleaner() throws IOException, InterruptedException {
76 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
77 Path root = UTIL.getDataTestDirOnTestFS();
78 Path file = new Path(root, "file");
79 fs.createNewFile(file);
80 long createTime = System.currentTimeMillis();
81 assertTrue("Test file not created!", fs.exists(file));
82 TimeToLiveHFileCleaner cleaner = new TimeToLiveHFileCleaner();
83
84 fs.setTimes(file, createTime - 100, -1);
85 Configuration conf = UTIL.getConfiguration();
86 conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, 100);
87 cleaner.setConf(conf);
88 assertTrue("File not set deletable - check mod time:" + getFileStats(file, fs)
89 + " with create time:" + createTime, cleaner.isFileDeletable(fs.getFileStatus(file)));
90 }
91
92
93
94
95
96 private String getFileStats(Path file, FileSystem fs) throws IOException {
97 FileStatus status = fs.getFileStatus(file);
98 return "File" + file + ", mtime:" + status.getModificationTime() + ", atime:"
99 + status.getAccessTime();
100 }
101
102 @Test(timeout = 60 *1000)
103 public void testHFileCleaning() throws Exception {
104 final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
105 String prefix = "someHFileThatWouldBeAUUID";
106 Configuration conf = UTIL.getConfiguration();
107
108 long ttl = 2000;
109 conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
110 "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner");
111 conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
112 Server server = new DummyServer();
113 Path archivedHfileDir =
114 new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
115 FileSystem fs = FileSystem.get(conf);
116 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir, POOL);
117
118
119 final long createTime = System.currentTimeMillis();
120 fs.delete(archivedHfileDir, true);
121 fs.mkdirs(archivedHfileDir);
122
123 fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));
124
125
126 LOG.debug("Now is: " + createTime);
127 for (int i = 1; i < 32; i++) {
128
129
130 Path fileName = new Path(archivedHfileDir, (prefix + "." + (createTime + i)));
131 fs.createNewFile(fileName);
132
133 fs.setTimes(fileName, createTime - ttl - 1, -1);
134 LOG.debug("Creating " + getFileStats(fileName, fs));
135 }
136
137
138
139 Path saved = new Path(archivedHfileDir, prefix + ".00000000000");
140 fs.createNewFile(saved);
141
142 fs.setTimes(saved, createTime - ttl / 2, -1);
143 LOG.debug("Creating " + getFileStats(saved, fs));
144 for (FileStatus stat : fs.listStatus(archivedHfileDir)) {
145 LOG.debug(stat.getPath().toString());
146 }
147
148 assertEquals(33, fs.listStatus(archivedHfileDir).length);
149
150
151 EnvironmentEdge setTime = new EnvironmentEdge() {
152 @Override
153 public long currentTime() {
154 return createTime;
155 }
156 };
157 EnvironmentEdgeManager.injectEdge(setTime);
158
159
160 cleaner.chore();
161
162
163 assertEquals(1, fs.listStatus(archivedHfileDir).length);
164
165 for (FileStatus file : fs.listStatus(archivedHfileDir)) {
166 LOG.debug("Kept hfiles: " + file.getPath().getName());
167 }
168
169
170 EnvironmentEdgeManager.injectEdge(originalEdge);
171 }
172
173 @Test
174 public void testRemovesEmptyDirectories() throws Exception {
175 Configuration conf = UTIL.getConfiguration();
176
177 conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
178 Server server = new DummyServer();
179 Path archivedHfileDir =
180 new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
181
182
183 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
184 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir, POOL);
185
186
187 Path table = new Path(archivedHfileDir, "table");
188 Path region = new Path(table, "regionsomthing");
189 Path family = new Path(region, "fam");
190 Path file = new Path(family, "file12345");
191 fs.mkdirs(family);
192 if (!fs.exists(family)) throw new RuntimeException("Couldn't create test family:" + family);
193 fs.create(file).close();
194 if (!fs.exists(file)) throw new RuntimeException("Test file didn't get created:" + file);
195
196
197 cleaner.chore();
198
199
200 assertFalse("family directory not removed for empty directory", fs.exists(family));
201 assertFalse("region directory not removed for empty directory", fs.exists(region));
202 assertFalse("table directory not removed for empty directory", fs.exists(table));
203 assertTrue("archive directory", fs.exists(archivedHfileDir));
204 }
205
206 static class DummyServer implements Server {
207
208 @Override
209 public Configuration getConfiguration() {
210 return UTIL.getConfiguration();
211 }
212
213 @Override
214 public ZooKeeperWatcher getZooKeeper() {
215 try {
216 return new ZooKeeperWatcher(getConfiguration(), "dummy server", this);
217 } catch (IOException e) {
218 e.printStackTrace();
219 }
220 return null;
221 }
222
223 @Override
224 public CoordinatedStateManager getCoordinatedStateManager() {
225 return null;
226 }
227
228 @Override
229 public ClusterConnection getConnection() {
230 return null;
231 }
232
233 @Override
234 public MetaTableLocator getMetaTableLocator() {
235 return null;
236 }
237
238 @Override
239 public ServerName getServerName() {
240 return ServerName.valueOf("regionserver,60020,000000");
241 }
242
243 @Override
244 public void abort(String why, Throwable e) {
245 }
246
247 @Override
248 public boolean isAborted() {
249 return false;
250 }
251
252 @Override
253 public void stop(String why) {
254 }
255
256 @Override
257 public boolean isStopped() {
258 return false;
259 }
260
261 @Override
262 public ChoreService getChoreService() {
263 return null;
264 }
265 }
266
267 @Test
268 public void testThreadCleanup() throws Exception {
269 Configuration conf = UTIL.getConfiguration();
270 conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
271 Server server = new DummyServer();
272 Path archivedHfileDir =
273 new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
274
275
276 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
277 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir, POOL);
278
279 fs.delete(archivedHfileDir, true);
280 fs.mkdirs(archivedHfileDir);
281
282 fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));
283
284 cleaner.chore();
285
286 cleaner.cleanup();
287
288 Thread.sleep(100);
289 for (Thread thread : cleaner.getCleanerThreads()) {
290 Assert.assertFalse(thread.isAlive());
291 }
292 }
293
294 @Test
295 public void testLargeSmallIsolation() throws Exception {
296 Configuration conf = UTIL.getConfiguration();
297
298 conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
299 conf.setInt(HFileCleaner.HFILE_DELETE_THROTTLE_THRESHOLD, 512 * 1024);
300 Server server = new DummyServer();
301 Path archivedHfileDir =
302 new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
303
304
305 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
306 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir, POOL);
307
308 fs.delete(archivedHfileDir, true);
309 fs.mkdirs(archivedHfileDir);
310
311 final int LARGE_FILE_NUM = 5;
312 final int SMALL_FILE_NUM = 20;
313 createFilesForTesting(LARGE_FILE_NUM, SMALL_FILE_NUM, fs, archivedHfileDir);
314
315 cleaner.chore();
316
317 Assert.assertEquals(LARGE_FILE_NUM, cleaner.getNumOfDeletedLargeFiles());
318 Assert.assertEquals(SMALL_FILE_NUM, cleaner.getNumOfDeletedSmallFiles());
319 }
320
321 @Test(timeout = 60 * 1000)
322 public void testOnConfigurationChange() throws Exception {
323
324 final int ORIGINAL_THROTTLE_POINT = 512 * 1024;
325 final int ORIGINAL_QUEUE_INIT_SIZE = 512;
326 final int UPDATE_THROTTLE_POINT = 1024;
327 final int UPDATE_QUEUE_INIT_SIZE = 1024;
328 final int LARGE_FILE_NUM = 5;
329 final int SMALL_FILE_NUM = 20;
330 final int LARGE_THREAD_NUM = 2;
331 final int SMALL_THREAD_NUM = 4;
332 final long THREAD_TIMEOUT_MSEC = 30 * 1000L;
333 final long THREAD_CHECK_INTERVAL_MSEC = 500L;
334
335 Configuration conf = UTIL.getConfiguration();
336
337 conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
338 conf.setInt(HFileCleaner.HFILE_DELETE_THROTTLE_THRESHOLD, ORIGINAL_THROTTLE_POINT);
339 conf.setInt(HFileCleaner.LARGE_HFILE_QUEUE_INIT_SIZE, ORIGINAL_QUEUE_INIT_SIZE);
340 conf.setInt(HFileCleaner.SMALL_HFILE_QUEUE_INIT_SIZE, ORIGINAL_QUEUE_INIT_SIZE);
341 Server server = new DummyServer();
342 Path archivedHfileDir =
343 new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
344
345
346 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
347 final HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir, POOL);
348 Assert.assertEquals(ORIGINAL_THROTTLE_POINT, cleaner.getThrottlePoint());
349 Assert.assertEquals(ORIGINAL_QUEUE_INIT_SIZE, cleaner.getLargeQueueInitSize());
350 Assert.assertEquals(ORIGINAL_QUEUE_INIT_SIZE, cleaner.getSmallQueueInitSize());
351 Assert.assertEquals(HFileCleaner.DEFAULT_HFILE_DELETE_THREAD_TIMEOUT_MSEC,
352 cleaner.getCleanerThreadTimeoutMsec());
353 Assert.assertEquals(HFileCleaner.DEFAULT_HFILE_DELETE_THREAD_CHECK_INTERVAL_MSEC,
354 cleaner.getCleanerThreadCheckIntervalMsec());
355
356
357 fs.delete(archivedHfileDir, true);
358 fs.mkdirs(archivedHfileDir);
359 createFilesForTesting(LARGE_FILE_NUM, SMALL_FILE_NUM, fs, archivedHfileDir);
360
361
362 Thread t = new Thread() {
363 @Override
364 public void run() {
365 cleaner.chore();
366 }
367 };
368 t.setDaemon(true);
369 t.start();
370
371 while (cleaner.getNumOfDeletedSmallFiles() == 0) {
372 Thread.yield();
373 }
374
375
376 Configuration newConf = new Configuration(conf);
377 newConf.setInt(HFileCleaner.HFILE_DELETE_THROTTLE_THRESHOLD, UPDATE_THROTTLE_POINT);
378 newConf.setInt(HFileCleaner.LARGE_HFILE_QUEUE_INIT_SIZE, UPDATE_QUEUE_INIT_SIZE);
379 newConf.setInt(HFileCleaner.SMALL_HFILE_QUEUE_INIT_SIZE, UPDATE_QUEUE_INIT_SIZE);
380 newConf.setInt(HFileCleaner.LARGE_HFILE_DELETE_THREAD_NUMBER, LARGE_THREAD_NUM);
381 newConf.setInt(HFileCleaner.SMALL_HFILE_DELETE_THREAD_NUMBER, SMALL_THREAD_NUM);
382 newConf.setLong(HFileCleaner.HFILE_DELETE_THREAD_TIMEOUT_MSEC, THREAD_TIMEOUT_MSEC);
383 newConf.setLong(HFileCleaner.HFILE_DELETE_THREAD_CHECK_INTERVAL_MSEC,
384 THREAD_CHECK_INTERVAL_MSEC);
385 cleaner.onConfigurationChange(newConf);
386
387 LOG.debug("File deleted from large queue: " + cleaner.getNumOfDeletedLargeFiles()
388 + "; from small queue: " + cleaner.getNumOfDeletedSmallFiles());
389 cleaner.onConfigurationChange(newConf);
390
391
392 Assert.assertEquals(UPDATE_THROTTLE_POINT, cleaner.getThrottlePoint());
393 Assert.assertEquals(UPDATE_QUEUE_INIT_SIZE, cleaner.getLargeQueueInitSize());
394 Assert.assertEquals(UPDATE_QUEUE_INIT_SIZE, cleaner.getSmallQueueInitSize());
395 Assert.assertEquals(LARGE_THREAD_NUM + SMALL_THREAD_NUM, cleaner.getCleanerThreads().size());
396 Assert.assertEquals(THREAD_TIMEOUT_MSEC, cleaner.getCleanerThreadTimeoutMsec());
397 Assert.assertEquals(THREAD_CHECK_INTERVAL_MSEC, cleaner.getCleanerThreadCheckIntervalMsec());
398
399
400 List<Thread> oldThreads = cleaner.getCleanerThreads();
401 cleaner.onConfigurationChange(newConf);
402 List<Thread> newThreads = cleaner.getCleanerThreads();
403 Assert.assertArrayEquals(oldThreads.toArray(), newThreads.toArray());
404
405
406 t.join();
407 LOG.debug("File deleted from large queue: " + cleaner.getNumOfDeletedLargeFiles()
408 + "; from small queue: " + cleaner.getNumOfDeletedSmallFiles());
409 Assert.assertTrue("Should delete more than " + LARGE_FILE_NUM
410 + " files from large queue but actually " + cleaner.getNumOfDeletedLargeFiles(),
411 cleaner.getNumOfDeletedLargeFiles() > LARGE_FILE_NUM);
412 Assert.assertTrue("Should delete less than " + SMALL_FILE_NUM
413 + " files from small queue but actually " + cleaner.getNumOfDeletedSmallFiles(),
414 cleaner.getNumOfDeletedSmallFiles() < SMALL_FILE_NUM);
415 }
416
417 private void createFilesForTesting(int largeFileNum, int smallFileNum, FileSystem fs,
418 Path archivedHfileDir) throws IOException {
419 final Random rand = new Random();
420 final byte[] large = new byte[1024 * 1024];
421 for (int i = 0; i < large.length; i++) {
422 large[i] = (byte) rand.nextInt(128);
423 }
424 final byte[] small = new byte[1024];
425 for (int i = 0; i < small.length; i++) {
426 small[i] = (byte) rand.nextInt(128);
427 }
428
429 for (int i = 1; i <= largeFileNum; i++) {
430 FSDataOutputStream out = fs.create(new Path(archivedHfileDir, "large-file-" + i));
431 out.write(large);
432 out.close();
433 }
434 for (int i = 1; i <= smallFileNum; i++) {
435 FSDataOutputStream out = fs.create(new Path(archivedHfileDir, "small-file-" + i));
436 out.write(small);
437 out.close();
438 }
439 }
440 }