/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
18 package org.apache.hadoop.hbase.master.cleaner;
19
20 import static org.junit.Assert.assertEquals;
21 import static org.junit.Assert.assertFalse;
22 import static org.junit.Assert.assertTrue;
23
24 import java.io.IOException;
25
26 import org.apache.commons.logging.Log;
27 import org.apache.commons.logging.LogFactory;
28 import org.apache.hadoop.conf.Configuration;
29 import org.apache.hadoop.fs.FileStatus;
30 import org.apache.hadoop.fs.FileSystem;
31 import org.apache.hadoop.fs.Path;
32 import org.apache.hadoop.hbase.CoordinatedStateManager;
33 import org.apache.hadoop.hbase.HBaseTestingUtility;
34 import org.apache.hadoop.hbase.HConstants;
35 import org.apache.hadoop.hbase.testclassification.MediumTests;
36 import org.apache.hadoop.hbase.Server;
37 import org.apache.hadoop.hbase.ServerName;
38 import org.apache.hadoop.hbase.client.ClusterConnection;
39 import org.apache.hadoop.hbase.util.EnvironmentEdge;
40 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
41 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
42 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
43 import org.junit.AfterClass;
44 import org.junit.BeforeClass;
45 import org.junit.Test;
46 import org.junit.experimental.categories.Category;
47
48 @Category(MediumTests.class)
49 public class TestHFileCleaner {
50 private static final Log LOG = LogFactory.getLog(TestHFileCleaner.class);
51
52 private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
53
54 @BeforeClass
55 public static void setupCluster() throws Exception {
56
57 UTIL.startMiniDFSCluster(1);
58 }
59
60 @AfterClass
61 public static void shutdownCluster() throws IOException {
62 UTIL.shutdownMiniDFSCluster();
63 }
64
65 @Test
66 public void testTTLCleaner() throws IOException, InterruptedException {
67 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
68 Path root = UTIL.getDataTestDirOnTestFS();
69 Path file = new Path(root, "file");
70 fs.createNewFile(file);
71 long createTime = System.currentTimeMillis();
72 assertTrue("Test file not created!", fs.exists(file));
73 TimeToLiveHFileCleaner cleaner = new TimeToLiveHFileCleaner();
74
75 fs.setTimes(file, createTime - 100, -1);
76 Configuration conf = UTIL.getConfiguration();
77 conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, 100);
78 cleaner.setConf(conf);
79 assertTrue("File not set deletable - check mod time:" + getFileStats(file, fs)
80 + " with create time:" + createTime, cleaner.isFileDeletable(fs.getFileStatus(file)));
81 }
82
83
84
85
86
87 private String getFileStats(Path file, FileSystem fs) throws IOException {
88 FileStatus status = fs.getFileStatus(file);
89 return "File" + file + ", mtime:" + status.getModificationTime() + ", atime:"
90 + status.getAccessTime();
91 }
92
93 @Test(timeout = 60 *1000)
94 public void testHFileCleaning() throws Exception {
95 final EnvironmentEdge originalEdge = EnvironmentEdgeManager.getDelegate();
96 String prefix = "someHFileThatWouldBeAUUID";
97 Configuration conf = UTIL.getConfiguration();
98
99 long ttl = 2000;
100 conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS,
101 "org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner");
102 conf.setLong(TimeToLiveHFileCleaner.TTL_CONF_KEY, ttl);
103 Server server = new DummyServer();
104 Path archivedHfileDir = new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
105 FileSystem fs = FileSystem.get(conf);
106 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);
107
108
109 final long createTime = System.currentTimeMillis();
110 fs.delete(archivedHfileDir, true);
111 fs.mkdirs(archivedHfileDir);
112
113 fs.createNewFile(new Path(archivedHfileDir, "dfd-dfd"));
114
115
116 LOG.debug("Now is: " + createTime);
117 for (int i = 1; i < 32; i++) {
118
119
120 Path fileName = new Path(archivedHfileDir, (prefix + "." + (createTime + i)));
121 fs.createNewFile(fileName);
122
123 fs.setTimes(fileName, createTime - ttl - 1, -1);
124 LOG.debug("Creating " + getFileStats(fileName, fs));
125 }
126
127
128
129 Path saved = new Path(archivedHfileDir, prefix + ".00000000000");
130 fs.createNewFile(saved);
131
132 fs.setTimes(saved, createTime - ttl / 2, -1);
133 LOG.debug("Creating " + getFileStats(saved, fs));
134 for (FileStatus stat : fs.listStatus(archivedHfileDir)) {
135 LOG.debug(stat.getPath().toString());
136 }
137
138 assertEquals(33, fs.listStatus(archivedHfileDir).length);
139
140
141 EnvironmentEdge setTime = new EnvironmentEdge() {
142 @Override
143 public long currentTime() {
144 return createTime;
145 }
146 };
147 EnvironmentEdgeManager.injectEdge(setTime);
148
149
150 cleaner.chore();
151
152
153 assertEquals(1, fs.listStatus(archivedHfileDir).length);
154
155 for (FileStatus file : fs.listStatus(archivedHfileDir)) {
156 LOG.debug("Kept hfiles: " + file.getPath().getName());
157 }
158
159 cleaner.interrupt();
160
161 EnvironmentEdgeManager.injectEdge(originalEdge);
162 }
163
164 @Test
165 public void testRemovesEmptyDirectories() throws Exception {
166 Configuration conf = UTIL.getConfiguration();
167
168 conf.setStrings(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
169 Server server = new DummyServer();
170 Path archivedHfileDir = new Path(UTIL.getDataTestDirOnTestFS(), HConstants.HFILE_ARCHIVE_DIRECTORY);
171
172
173 FileSystem fs = UTIL.getDFSCluster().getFileSystem();
174 HFileCleaner cleaner = new HFileCleaner(1000, server, conf, fs, archivedHfileDir);
175
176
177 Path table = new Path(archivedHfileDir, "table");
178 Path region = new Path(table, "regionsomthing");
179 Path family = new Path(region, "fam");
180 Path file = new Path(family, "file12345");
181 fs.mkdirs(family);
182 if (!fs.exists(family)) throw new RuntimeException("Couldn't create test family:" + family);
183 fs.create(file).close();
184 if (!fs.exists(file)) throw new RuntimeException("Test file didn't get created:" + file);
185
186
187 cleaner.chore();
188
189
190 assertFalse("family directory not removed for empty directory", fs.exists(family));
191 assertFalse("region directory not removed for empty directory", fs.exists(region));
192 assertFalse("table directory not removed for empty directory", fs.exists(table));
193 assertTrue("archive directory", fs.exists(archivedHfileDir));
194 }
195
196 static class DummyServer implements Server {
197
198 @Override
199 public Configuration getConfiguration() {
200 return UTIL.getConfiguration();
201 }
202
203 @Override
204 public ZooKeeperWatcher getZooKeeper() {
205 try {
206 return new ZooKeeperWatcher(getConfiguration(), "dummy server", this);
207 } catch (IOException e) {
208 e.printStackTrace();
209 }
210 return null;
211 }
212
213 @Override
214 public CoordinatedStateManager getCoordinatedStateManager() {
215 return null;
216 }
217
218 @Override
219 public ClusterConnection getConnection() {
220 return null;
221 }
222
223 @Override
224 public MetaTableLocator getMetaTableLocator() {
225 return null;
226 }
227
228 @Override
229 public ServerName getServerName() {
230 return ServerName.valueOf("regionserver,60020,000000");
231 }
232
233 @Override
234 public void abort(String why, Throwable e) {
235 }
236
237 @Override
238 public boolean isAborted() {
239 return false;
240 }
241
242 @Override
243 public void stop(String why) {
244 }
245
246 @Override
247 public boolean isStopped() {
248 return false;
249 }
250 }
251 }