/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.util;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.ClusterId;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.exceptions.FileSystemVersionException;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import com.google.common.primitives.Ints;
import com.google.protobuf.InvalidProtocolBufferException;
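
/**
 * Utility methods for interacting with the underlying file system.
 */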
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class FSUtils {
  private static final Log LOG = LogFactory.getLog(FSUtils.class);

  private static final String FULL_RWX_PERMISSIONS = "777";

  protected FSUtils() {
    super();
  }
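
  /**
   * Check whether the path component of <code>path</code> starts with the path
   * component of <code>rootPath</code>; scheme and authority are ignored.
   *
   * @param rootPath the root path to check against
   * @param path the path to check
   * @return true if <code>path</code> starts with <code>rootPath</code>
   */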
  public static boolean isStartingWithPath(final Path rootPath, final String path) {
    String uriRootPath = rootPath.toUri().getPath();
    String tailUriPath = (new Path(path)).toUri().getPath();
    return tailUriPath.startsWith(uriRootPath);
  }

  public static boolean isMatchingTail(final Path pathToSearch, String pathTail) {
    return isMatchingTail(pathToSearch, new Path(pathTail));
  }
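
  /**
   * Compare the name components of two paths from the leaf upward, ignoring
   * scheme and authority, until <code>pathTail</code> is exhausted. Paths of
   * different depth never match.
   *
   * @param pathToSearch path to match against
   * @param pathTail the tail to look for
   * @return true if <code>pathTail</code> matches the tail of <code>pathToSearch</code>
   */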
  public static boolean isMatchingTail(final Path pathToSearch, final Path pathTail) {
    if (pathToSearch.depth() != pathTail.depth()) return false;
    Path tailPath = pathTail;
    String tailName;
    Path toSearch = pathToSearch;
    String toSearchName;
    boolean result = false;
    do {
      tailName = tailPath.getName();
      if (tailName == null || tailName.length() <= 0) {
        result = true;
        break;
      }
      toSearchName = toSearch.getName();
      if (toSearchName == null || toSearchName.length() <= 0) break;

      tailPath = tailPath.getParent();
      toSearch = toSearch.getParent();
    } while (tailName.equals(toSearchName));
    return result;
  }
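
  /**
   * Instantiate the FSUtils implementation registered for the scheme of
   * <code>fs</code> (configuration key <code>hbase.fsutil.&lt;scheme&gt;.impl</code>),
   * defaulting to {@link FSHDFSUtils}.
   *
   * @param fs filesystem whose scheme selects the implementation
   * @param conf configuration to use
   * @return the configured FSUtils instance
   */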
  public static FSUtils getInstance(FileSystem fs, Configuration conf) {
    String scheme = fs.getUri().getScheme();
    if (scheme == null) {
      LOG.warn("Could not find scheme for uri " +
          fs.getUri() + ", default to hdfs");
      scheme = "hdfs";
    }
    Class<?> fsUtilsClass = conf.getClass("hbase.fsutil." +
        scheme + ".impl", FSHDFSUtils.class);
    FSUtils fsUtils = (FSUtils) ReflectionUtils.newInstance(fsUtilsClass, conf);
    return fsUtils;
  }
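
  /**
   * Delete if exists.
   *
   * @param fs filesystem object
   * @param dir directory to delete
   * @return True if deleted <code>dir</code>
   * @throws IOException e
   */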
  public static boolean deleteDirectory(final FileSystem fs, final Path dir)
      throws IOException {
    return fs.exists(dir) && fs.delete(dir, true);
  }

  public static FSDataOutputStream create(FileSystem fs, Path path,
      FsPermission perm) throws IOException {
    return create(fs, path, perm, true);
  }
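
  /**
   * Create the specified file on the filesystem. Uses the passed permissions, the
   * configured buffer size (<code>io.file.buffer.size</code>, default 4096), the
   * filesystem's default replication and block size, and no progress reporting.
   *
   * @param fs {@link FileSystem} on which to write the file
   * @param path {@link Path} to the file to write
   * @param perm permissions to apply to the file
   * @param overwrite whether an existing file should be overwritten
   * @return output stream to the created file
   * @throws IOException if the file cannot be created
   */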
  public static FSDataOutputStream create(FileSystem fs, Path path,
      FsPermission perm, boolean overwrite) throws IOException {
    LOG.debug("Creating file=" + path + " with permission=" + perm);

    return fs.create(path, perm, overwrite,
        fs.getConf().getInt("io.file.buffer.size", 4096),
        fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
  }
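
  /**
   * Get the file permissions specified in the configuration, if they are enabled.
   *
   * @param fs filesystem that the file will be created on
   * @param conf configuration to read for determining if permissions are enabled
   *          and which umask to use
   * @param permissionConfKey property key in the configuration to use when
   *          finding the umask
   * @return the permission to use when creating a new file on the fs. If special
   *         permissions are not specified in the configuration, the default
   *         permissions of the fs are returned.
   */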
  public static FsPermission getFilePermissions(final FileSystem fs,
      final Configuration conf, final String permissionConfKey) {
    boolean enablePermissions = conf.getBoolean(
        HConstants.ENABLE_DATA_FILE_UMASK, false);

    if (enablePermissions) {
      try {
        FsPermission perm = new FsPermission(FULL_RWX_PERMISSIONS);
        // Make sure that we have a mask; if not, go with the default.
        String mask = conf.get(permissionConfKey);
        if (mask == null)
          return FsPermission.getDefault();
        // Apply the umask.
        FsPermission umask = new FsPermission(mask);
        return perm.applyUMask(umask);
      } catch (IllegalArgumentException e) {
        LOG.warn(
            "Incorrect umask attempted to be created: "
                + conf.get(permissionConfKey)
                + ", using default file permissions.", e);
        return FsPermission.getDefault();
      }
    }
    return FsPermission.getDefault();
  }
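
  /**
   * Checks to see if the specified file system is available.
   *
   * @param fs filesystem
   * @throws IOException e
   */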
  public static void checkFileSystemAvailable(final FileSystem fs)
      throws IOException {
    if (!(fs instanceof DistributedFileSystem)) {
      return;
    }
    IOException exception = null;
    DistributedFileSystem dfs = (DistributedFileSystem) fs;
    try {
      if (dfs.exists(new Path("/"))) {
        return;
      }
    } catch (IOException e) {
      exception = RemoteExceptionHandler.checkIOException(e);
    }
    try {
      fs.close();
    } catch (Exception e) {
      LOG.error("file system close failed: ", e);
    }
    IOException io = new IOException("File system is not available");
    io.initCause(exception);
    throw io;
  }
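
  /**
   * Check whether dfs is in safemode. Uses reflection to probe for the
   * two-argument setSafeMode(SafeModeAction, boolean) call and falls back to
   * the single-argument variant on older Hadoop versions.
   *
   * @param dfs the DistributedFileSystem to query
   * @return true if the namenode is in safe mode
   * @throws IOException on filesystem error
   */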
  private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {
    boolean inSafeMode = false;
    try {
      Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class<?>[] {
          org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.class, boolean.class});
      inSafeMode = (Boolean) m.invoke(dfs,
          org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET, true);
    } catch (Exception e) {
      if (e instanceof IOException) throw (IOException) e;

      // setSafeMode(SafeModeAction, boolean) is not available on this Hadoop
      // version; fall back to the single-argument call.
      inSafeMode = dfs.setSafeMode(
          org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET);
    }
    return inSafeMode;
  }

  /**
   * Check whether dfs is in safemode.
   * @param conf configuration
   * @throws IOException if dfs is in safemode
   */
  public static void checkDfsSafeMode(final Configuration conf)
      throws IOException {
    boolean isInSafeMode = false;
    FileSystem fs = FileSystem.get(conf);
    if (fs instanceof DistributedFileSystem) {
      DistributedFileSystem dfs = (DistributedFileSystem) fs;
      isInSafeMode = isInSafeMode(dfs);
    }
    if (isInSafeMode) {
      throw new IOException("File system is in safemode, it can't be written now");
    }
  }
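
  /**
   * Verifies current version of file system.
   *
   * @param fs filesystem object
   * @param rootdir root hbase directory
   * @return null if no version file exists, version string otherwise
   * @throws IOException e
   * @throws DeserializationException if the version file cannot be parsed
   */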
  public static String getVersion(FileSystem fs, Path rootdir)
      throws IOException, DeserializationException {
    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);
    FileStatus[] status = null;
    try {
      // hadoop 2.0 throws FNFE if the directory does not exist;
      // hadoop 1.0 returns null if the directory does not exist.
      status = fs.listStatus(versionFile);
    } catch (FileNotFoundException fnfe) {
      return null;
    }
    if (status == null || status.length == 0) return null;
    String version = null;
    byte[] content = new byte[(int) status[0].getLen()];
    FSDataInputStream s = fs.open(versionFile);
    try {
      IOUtils.readFully(s, content, 0, content.length);
      if (ProtobufUtil.isPBMagicPrefix(content)) {
        version = parseVersionFrom(content);
      } else {
        // Presume it is the pre-pb (writeUTF) format.
        InputStream is = new ByteArrayInputStream(content);
        DataInputStream dis = new DataInputStream(is);
        try {
          version = dis.readUTF();
        } finally {
          dis.close();
        }
        // Update the file to the current format.
        LOG.info("Updating the hbase.version file format with version=" + version);
        setVersion(fs, rootdir, version, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
      }
    } catch (EOFException eof) {
      LOG.warn("Version file was empty, odd, will try to set it.");
    } finally {
      s.close();
    }
    return version;
  }

  /**
   * Parse the content of the ${HBASE_ROOTDIR}/hbase.version file.
   * @param bytes The byte content of the hbase.version file.
   * @return The version found in the file as a String.
   * @throws DeserializationException if the content cannot be parsed
   */
  static String parseVersionFrom(final byte[] bytes)
      throws DeserializationException {
    ProtobufUtil.expectPBMagicPrefix(bytes);
    int pblen = ProtobufUtil.lengthOfPBMagic();
    FSProtos.HBaseVersionFileContent.Builder builder =
        FSProtos.HBaseVersionFileContent.newBuilder();
    FSProtos.HBaseVersionFileContent fileContent;
    try {
      fileContent = builder.mergeFrom(bytes, pblen, bytes.length - pblen).build();
      return fileContent.getVersion();
    } catch (InvalidProtocolBufferException e) {
      // Convert to a checked exception callers can handle.
      throw new DeserializationException(e);
    }
  }

  /**
   * Create the content to write into the ${HBASE_ROOTDIR}/hbase.version file.
   * @param version Version to persist
   * @return Serialized protobuf with <code>version</code> content and a bit of pb magic for a prefix.
   */
  static byte[] toVersionByteArray(final String version) {
    FSProtos.HBaseVersionFileContent.Builder builder =
        FSProtos.HBaseVersionFileContent.newBuilder();
    return ProtobufUtil.prependPBMagic(builder.setVersion(version).build().toByteArray());
  }

  /**
   * Verifies current version of file system.
   *
   * @param fs file system
   * @param rootdir root directory of HBase installation
   * @param message if true, issues a message on System.out
   * @throws IOException e
   * @throws DeserializationException if the version file cannot be parsed
   */
  public static void checkVersion(FileSystem fs, Path rootdir, boolean message)
      throws IOException, DeserializationException {
    checkVersion(fs, rootdir, message, 0, HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
  }
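
  /**
   * Verifies current version of file system.
   *
   * @param fs file system
   * @param rootdir root directory of HBase installation
   * @param message if true, issues a message on System.out
   * @param wait wait interval between retries
   * @param retries number of times to retry
   * @throws IOException e
   * @throws DeserializationException if the version file cannot be parsed
   */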
  public static void checkVersion(FileSystem fs, Path rootdir,
      boolean message, int wait, int retries)
      throws IOException, DeserializationException {
    String version = getVersion(fs, rootdir);
    if (version == null) {
      if (!metaRegionExists(fs, rootdir)) {
        // rootdir is empty (no version file and no meta region):
        // this is a new install, just create the version file.
        setVersion(fs, rootdir, wait, retries);
        return;
      }
    } else if (version.compareTo(HConstants.FILE_SYSTEM_VERSION) == 0) return;

    // Version is out of date and requires migration.
    // Output on stdout so the user sees it in the terminal.
    String msg = "File system needs to be upgraded."
        + " You have version " + version
        + " and I want version " + HConstants.FILE_SYSTEM_VERSION
        + ". Run the '${HBASE_HOME}/bin/hbase migrate' script.";
    if (message) {
      System.out.println("WARNING! " + msg);
    }
    throw new FileSystemVersionException(msg);
  }

  /**
   * Sets version of file system.
   *
   * @param fs filesystem object
   * @param rootdir hbase root
   * @throws IOException e
   */
  public static void setVersion(FileSystem fs, Path rootdir)
      throws IOException {
    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, 0,
        HConstants.DEFAULT_VERSION_FILE_WRITE_ATTEMPTS);
  }

  /**
   * Sets version of file system.
   *
   * @param fs filesystem object
   * @param rootdir hbase root
   * @param wait time to wait between retries
   * @param retries number of times to retry before failing
   * @throws IOException e
   */
  public static void setVersion(FileSystem fs, Path rootdir, int wait, int retries)
      throws IOException {
    setVersion(fs, rootdir, HConstants.FILE_SYSTEM_VERSION, wait, retries);
  }
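
  /**
   * Sets version of file system.
   *
   * @param fs filesystem object
   * @param rootdir hbase root directory
   * @param version version to set
   * @param wait time to wait between retries
   * @param retries number of times to retry before throwing an IOException
   * @throws IOException e
   */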
  public static void setVersion(FileSystem fs, Path rootdir, String version,
      int wait, int retries) throws IOException {
    Path versionFile = new Path(rootdir, HConstants.VERSION_FILE_NAME);
    while (true) {
      try {
        FSDataOutputStream s = fs.create(versionFile);
        s.write(toVersionByteArray(version));
        s.close();
        LOG.debug("Created version file at " + rootdir.toString() + " with version=" + version);
        return;
      } catch (IOException e) {
        if (retries > 0) {
          LOG.warn("Unable to create version file at " + rootdir.toString() + ", retrying", e);
          fs.delete(versionFile, false);
          try {
            if (wait > 0) {
              Thread.sleep(wait);
            }
          } catch (InterruptedException ex) {
            // ignore; loop and retry
          }
          retries--;
        } else {
          throw e;
        }
      }
    }
  }
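
  /**
   * Checks that a cluster ID file exists in the HBase root directory.
   *
   * @param fs the root directory FileSystem
   * @param rootdir the HBase root directory
   * @param wait how long to wait between retries
   * @return <code>true</code> if the file exists, otherwise <code>false</code>
   * @throws IOException if checking the FileSystem fails
   */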
  public static boolean checkClusterIdExists(FileSystem fs, Path rootdir,
      int wait) throws IOException {
    while (true) {
      try {
        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);
        return fs.exists(filePath);
      } catch (IOException ioe) {
        if (wait > 0) {
          LOG.warn("Unable to check cluster ID file in " + rootdir.toString() +
              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));
          try {
            Thread.sleep(wait);
          } catch (InterruptedException ie) {
            Thread.interrupted();
            break;
          }
        } else {
          throw ioe;
        }
      }
    }
    return false;
  }
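
  /**
   * Returns the value of the unique cluster ID stored for this HBase instance.
   *
   * @param fs the root directory FileSystem
   * @param rootdir the path to the HBase root directory
   * @return the unique cluster identifier, or null if the cluster ID file does not exist
   * @throws IOException if reading the cluster ID file fails
   */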
  public static ClusterId getClusterId(FileSystem fs, Path rootdir)
      throws IOException {
    Path idPath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);
    ClusterId clusterId = null;
    FileStatus status = fs.exists(idPath) ? fs.getFileStatus(idPath) : null;
    if (status != null) {
      int len = Ints.checkedCast(status.getLen());
      byte[] content = new byte[len];
      FSDataInputStream in = fs.open(idPath);
      try {
        in.readFully(content);
      } catch (EOFException eof) {
        LOG.warn("Cluster ID file " + idPath.toString() + " was empty");
      } finally {
        in.close();
      }
      try {
        clusterId = ClusterId.parseFrom(content);
      } catch (DeserializationException e) {
        throw new IOException("content=" + Bytes.toString(content), e);
      }
      // If not pb'd, make it so.
      if (!ProtobufUtil.isPBMagicPrefix(content)) rewriteAsPb(fs, rootdir, idPath, clusterId);
      return clusterId;
    } else {
      LOG.warn("Cluster ID file does not exist at " + idPath.toString());
    }
    return clusterId;
  }

  private static void rewriteAsPb(final FileSystem fs, final Path rootdir, final Path p,
      final ClusterId cid)
      throws IOException {
    // Rewrite the file as pb.  Move aside the old one first, write the new
    // content, then delete the moved-aside file.
    Path movedAsideName = new Path(p + "." + System.currentTimeMillis());
    if (!fs.rename(p, movedAsideName)) throw new IOException("Failed rename of " + p);
    setClusterId(fs, rootdir, cid, 100);
    if (!fs.delete(movedAsideName, false)) {
      throw new IOException("Failed delete of " + movedAsideName);
    }
    LOG.debug("Rewrote the hbase.id file as pb");
  }
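
  /**
   * Writes a new unique identifier for this cluster to the "hbase.id" file
   * in the HBase root directory.
   *
   * @param fs the root directory FileSystem
   * @param rootdir the path to the HBase root directory
   * @param clusterId the unique identifier to store
   * @param wait how long (in milliseconds) to wait between retries
   * @throws IOException if writing the file fails and <code>wait</code> is not positive
   */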
  public static void setClusterId(FileSystem fs, Path rootdir, ClusterId clusterId,
      int wait) throws IOException {
    while (true) {
      try {
        Path filePath = new Path(rootdir, HConstants.CLUSTER_ID_FILE_NAME);
        FSDataOutputStream s = fs.create(filePath);
        try {
          s.write(clusterId.toByteArray());
        } finally {
          s.close();
        }
        if (LOG.isDebugEnabled()) {
          LOG.debug("Created cluster ID file at " + filePath.toString() + " with ID: " + clusterId);
        }
        return;
      } catch (IOException ioe) {
        if (wait > 0) {
          LOG.warn("Unable to create cluster ID file in " + rootdir.toString() +
              ", retrying in " + wait + "msec: " + StringUtils.stringifyException(ioe));
          try {
            Thread.sleep(wait);
          } catch (InterruptedException ie) {
            Thread.interrupted();
            break;
          }
        } else {
          throw ioe;
        }
      }
    }
  }

  public static Path validateRootPath(Path root) throws IOException {
    try {
      URI rootURI = new URI(root.toString());
      String scheme = rootURI.getScheme();
      if (scheme == null) {
        throw new IOException("Root directory does not have a scheme");
      }
      return root;
    } catch (URISyntaxException e) {
      IOException io = new IOException("Root directory path is not a valid " +
          "URI -- check your " + HConstants.HBASE_DIR + " configuration");
      io.initCause(e);
      throw io;
    }
  }

  public static String removeRootPath(Path path, final Configuration conf) throws IOException {
    Path root = FSUtils.getRootDir(conf);
    String pathStr = path.toString();

    if (!pathStr.startsWith(root.toString())) return pathStr;

    return pathStr.substring(root.toString().length() + 1);
  }
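
  /**
   * If DFS, check safe mode and if so, wait until we clear it.
   *
   * @param conf configuration
   * @param wait sleep between retries
   * @throws IOException e
   */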
  public static void waitOnSafeMode(final Configuration conf,
      final long wait)
      throws IOException {
    FileSystem fs = FileSystem.get(conf);
    if (!(fs instanceof DistributedFileSystem)) return;
    DistributedFileSystem dfs = (DistributedFileSystem) fs;
    // Make sure dfs is not in safe mode.
    while (isInSafeMode(dfs)) {
      LOG.info("Waiting for dfs to exit safe mode...");
      try {
        Thread.sleep(wait);
      } catch (InterruptedException e) {
        // ignore; recheck safe mode
      }
    }
  }

  public static String getPath(Path p) {
    return p.toUri().getPath();
  }
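
  /**
   * Get the path for the root data directory.
   *
   * @param c configuration
   * @return {@link Path} to the hbase root directory from configuration, as a qualified Path
   * @throws IOException e
   */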
  public static Path getRootDir(final Configuration c) throws IOException {
    Path p = new Path(c.get(HConstants.HBASE_DIR));
    FileSystem fs = p.getFileSystem(c);
    return p.makeQualified(fs);
  }

  public static void setRootDir(final Configuration c, final Path root) throws IOException {
    c.set(HConstants.HBASE_DIR, root.toString());
  }

  public static void setFsDefault(final Configuration c, final Path root) throws IOException {
    c.set("fs.defaultFS", root.toString());
    c.set("fs.default.name", root.toString());
  }

  public static boolean metaRegionExists(FileSystem fs, Path rootdir)
      throws IOException {
    Path rootRegionDir =
        HRegion.getRegionDir(rootdir, HRegionInfo.FIRST_META_REGIONINFO);
    return fs.exists(rootRegionDir);
  }
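
  /**
   * Compute HDFS block distribution of a given file, or a portion of the file.
   *
   * @param fs file system
   * @param status file status of the file
   * @param start start position of the portion
   * @param length length of the portion
   * @return the HDFS blocks distribution
   * @throws IOException e
   */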
  static public HDFSBlocksDistribution computeHDFSBlocksDistribution(
      final FileSystem fs, FileStatus status, long start, long length)
      throws IOException {
    HDFSBlocksDistribution blocksDistribution = new HDFSBlocksDistribution();
    BlockLocation[] blockLocations =
        fs.getFileBlockLocations(status, start, length);
    for (BlockLocation bl : blockLocations) {
      String[] hosts = bl.getHosts();
      long len = bl.getLength();
      blocksDistribution.addHostsAndBlockWeight(hosts, len);
    }

    return blocksDistribution;
  }
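
  /**
   * Runs through the HBase rootdir and checks that every store has only one
   * file in it, i.e. that all stores have been major compacted.
   *
   * @param fs filesystem
   * @param hbaseRootDir hbase root directory
   * @return true if every store under the root directory is major compacted
   * @throws IOException e
   */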
  public static boolean isMajorCompacted(final FileSystem fs,
      final Path hbaseRootDir)
      throws IOException {
    // Presumes any directory under hbase.rootdir is a table.
    FileStatus[] tableDirs = fs.listStatus(hbaseRootDir, new DirFilter(fs));
    for (FileStatus tableDir : tableDirs) {
      // Skip the .log directory.  All others should be tables.  Inside a table,
      // there are compaction.dir directories to skip.  Otherwise, all else
      // should be regions.
      Path d = tableDir.getPath();
      if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
        continue;
      }
      FileStatus[] regionDirs = fs.listStatus(d, new DirFilter(fs));
      for (FileStatus regionDir : regionDirs) {
        Path dd = regionDir.getPath();
        if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
          continue;
        }
        // Else it is a region name.  Now look in the region for families.
        FileStatus[] familyDirs = fs.listStatus(dd, new DirFilter(fs));
        for (FileStatus familyDir : familyDirs) {
          Path family = familyDir.getPath();
          // Now in family, make sure there is only one file.
          FileStatus[] familyStatus = fs.listStatus(family);
          if (familyStatus.length > 1) {
            LOG.debug(family.toString() + " has " + familyStatus.length +
                " files.");
            return false;
          }
        }
      }
    }
    return true;
  }

  /**
   * Returns the total overall fragmentation percentage. Includes the catalog
   * tables as well.
   *
   * @param master the master defining the HBase root and file system
   * @return the overall fragmentation percentage, or -1 if it cannot be computed
   * @throws IOException when scanning the directory fails
   */
  public static int getTotalTableFragmentation(final HMaster master)
      throws IOException {
    Map<String, Integer> map = getTableFragmentation(master);
    return map != null && map.size() > 0 ? map.get("-TOTAL-") : -1;
  }

  /**
   * Runs through the HBase rootdir and checks how many stores for each table
   * have more than one file in them. Checks the catalog tables too. The total
   * percentage across all tables is stored under the special key "-TOTAL-".
   *
   * @param master the master defining the HBase root and file system
   * @return a map for each table and its percentage
   * @throws IOException when scanning the directory fails
   */
  public static Map<String, Integer> getTableFragmentation(
      final HMaster master)
      throws IOException {
    Path path = getRootDir(master.getConfiguration());
    FileSystem fs = path.getFileSystem(master.getConfiguration());
    return getTableFragmentation(fs, path);
  }
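
  /**
   * Runs through the HBase rootdir and checks how many stores for each table
   * have more than one file in them. Checks the catalog tables too. The total
   * percentage across all tables is stored under the special key "-TOTAL-".
   *
   * @param fs the file system to use
   * @param hbaseRootDir the root directory to scan
   * @return a map for each table and its percentage
   * @throws IOException when scanning the directory fails
   */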
  public static Map<String, Integer> getTableFragmentation(
      final FileSystem fs, final Path hbaseRootDir)
      throws IOException {
    Map<String, Integer> frags = new HashMap<String, Integer>();
    int cfCountTotal = 0;
    int cfFragTotal = 0;
    DirFilter df = new DirFilter(fs);

    FileStatus[] tableDirs = fs.listStatus(hbaseRootDir, df);
    for (FileStatus tableDir : tableDirs) {
      Path d = tableDir.getPath();
      if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
        continue;
      }
      int cfCount = 0;
      int cfFrag = 0;
      FileStatus[] regionDirs = fs.listStatus(d, df);
      for (FileStatus regionDir : regionDirs) {
        Path dd = regionDir.getPath();
        if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
          continue;
        }
        FileStatus[] familyDirs = fs.listStatus(dd, df);
        for (FileStatus familyDir : familyDirs) {
          cfCount++;
          cfCountTotal++;
          Path family = familyDir.getPath();
          FileStatus[] familyStatus = fs.listStatus(family);
          if (familyStatus.length > 1) {
            cfFrag++;
            cfFragTotal++;
          }
        }
      }
      frags.put(d.getName(), Math.round((float) cfFrag / cfCount * 100));
    }
    frags.put("-TOTAL-", Math.round((float) cfFragTotal / cfCountTotal * 100));
    return frags;
  }

  public static boolean isPre020FileLayout(final FileSystem fs,
      final Path hbaseRootDir)
      throws IOException {
    Path mapfiles = new Path(new Path(new Path(new Path(hbaseRootDir, "-ROOT-"),
        "70236052"), "info"), "mapfiles");
    return fs.exists(mapfiles);
  }
  public static boolean isMajorCompactedPre020(final FileSystem fs,
      final Path hbaseRootDir)
      throws IOException {
    FileStatus[] tableDirs = fs.listStatus(hbaseRootDir, new DirFilter(fs));
    for (FileStatus tableDir : tableDirs) {
      Path d = tableDir.getPath();
      if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
        continue;
      }
      FileStatus[] regionDirs = fs.listStatus(d, new DirFilter(fs));
      for (FileStatus regionDir : regionDirs) {
        Path dd = regionDir.getPath();
        if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
          continue;
        }
        FileStatus[] familyDirs = fs.listStatus(dd, new DirFilter(fs));
        for (FileStatus familyDir : familyDirs) {
          Path family = familyDir.getPath();
          FileStatus[] infoAndMapfile = fs.listStatus(family);
          if (infoAndMapfile.length != 0 && infoAndMapfile.length != 2) {
            LOG.debug(family.toString() +
                " has more than just info and mapfile: " + infoAndMapfile.length);
            return false;
          }
          for (int ll = 0; ll < 2; ll++) {
            if (infoAndMapfile[ll].getPath().getName().equals("info") ||
                infoAndMapfile[ll].getPath().getName().equals("mapfiles"))
              continue;
            LOG.debug("Unexpected directory name: " +
                infoAndMapfile[ll].getPath());
            return false;
          }
          FileStatus[] familyStatus =
              fs.listStatus(new Path(family, "mapfiles"));
          if (familyStatus.length > 1) {
            LOG.debug(family.toString() + " has " + familyStatus.length +
                " files.");
            return false;
          }
        }
      }
    }
    return true;
  }
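
  /**
   * A {@link PathFilter} that returns only regular files.
   */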
  static class FileFilter implements PathFilter {
    private final FileSystem fs;

    public FileFilter(final FileSystem fs) {
      this.fs = fs;
    }

    @Override
    public boolean accept(Path p) {
      try {
        return fs.isFile(p);
      } catch (IOException e) {
        LOG.debug("unable to verify if path=" + p + " is a regular file", e);
        return false;
      }
    }
  }
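
  /**
   * A {@link PathFilter} that returns directories, excluding the non-user-table
   * directories.
   */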
  public static class DirFilter implements PathFilter {
    private final FileSystem fs;

    public DirFilter(final FileSystem fs) {
      this.fs = fs;
    }

    @Override
    public boolean accept(Path p) {
      boolean isValid = false;
      try {
        if (HConstants.HBASE_NON_USER_TABLE_DIRS.contains(p.toString())) {
          isValid = false;
        } else {
          isValid = fs.getFileStatus(p).isDir();
        }
      } catch (IOException e) {
        LOG.warn("An error occurred while verifying if [" + p.toString() +
            "] is a valid directory. Returning 'not valid' and continuing.", e);
      }
      return isValid;
    }
  }
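
  /**
   * Heuristic to determine whether it is safe to open a file for append.
   * Checks <code>dfs.support.append</code> and uses reflection to look for
   * SequenceFile.Writer.syncFs() or FSDataOutputStream.hflush().
   *
   * @param conf configuration
   * @return true if append is supported
   */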
  public static boolean isAppendSupported(final Configuration conf) {
    boolean append = conf.getBoolean("dfs.support.append", false);
    if (append) {
      try {
        // Probe for SequenceFile.Writer.syncFs(); present on builds with a
        // working append.
        SequenceFile.Writer.class.getMethod("syncFs", new Class<?>[] {});
        append = true;
      } catch (SecurityException e) {
      } catch (NoSuchMethodException e) {
        append = false;
      }
    }
    if (!append) {
      // Probe for the newer FSDataOutputStream.hflush() API.
      try {
        FSDataOutputStream.class.getMethod("hflush", new Class<?>[] {});
        append = true;
      } catch (NoSuchMethodException e) {
        append = false;
      }
    }
    return append;
  }

  /**
   * @param conf configuration
   * @return true if the configured filesystem's scheme is 'hdfs'
   * @throws IOException e
   */
  public static boolean isHDFS(final Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    String scheme = fs.getUri().getScheme();
    return scheme.equalsIgnoreCase("hdfs");
  }
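
  /**
   * Recover the lease on a file that may have been left open by another
   * process. Implementations are filesystem specific.
   *
   * @param fs FileSystem handle
   * @param p Path of the file to recover the lease on
   * @param conf configuration
   * @throws IOException if lease recovery fails
   */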
  public abstract void recoverFileLease(final FileSystem fs, final Path p,
      Configuration conf) throws IOException;
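
  /**
   * @param fs filesystem
   * @param rootdir hbase root directory
   * @return all the table directories under <code>rootdir</code>, excluding
   *     the directories listed in {@link HConstants#HBASE_NON_USER_TABLE_DIRS}
   * @throws IOException e
   */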
  public static List<Path> getTableDirs(final FileSystem fs, final Path rootdir)
      throws IOException {
    FileStatus[] dirs = fs.listStatus(rootdir, new DirFilter(fs));
    List<Path> tabledirs = new ArrayList<Path>(dirs.length);
    for (FileStatus dir : dirs) {
      Path p = dir.getPath();
      String tableName = p.getName();
      if (!HConstants.HBASE_NON_USER_TABLE_DIRS.contains(tableName)) {
        tabledirs.add(p);
      }
    }
    return tabledirs;
  }

  public static Path getTablePath(Path rootdir, byte[] tableName) {
    return getTablePath(rootdir, Bytes.toString(tableName));
  }

  public static Path getTablePath(Path rootdir, final String tableName) {
    return new Path(rootdir, tableName);
  }
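
  /**
   * A {@link PathFilter} that accepts only region directories: directories
   * whose name is a hex-encoded region name.
   */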
  public static class RegionDirFilter implements PathFilter {
    // This pattern matches hex-encoded region directory names.
    final public static Pattern regionDirPattern = Pattern.compile("^[0-9a-f]*$");
    final FileSystem fs;

    public RegionDirFilter(FileSystem fs) {
      this.fs = fs;
    }

    @Override
    public boolean accept(Path rd) {
      if (!regionDirPattern.matcher(rd.getName()).matches()) {
        return false;
      }

      try {
        return fs.getFileStatus(rd).isDir();
      } catch (IOException ioe) {
        // Maybe the file was moved or the fs was disconnected.
        LOG.warn("Skipping file " + rd + " due to IOException", ioe);
        return false;
      }
    }
  }

  /**
   * Given a particular table dir, return all the region dirs inside it, excluding
   * files such as .tableinfo.
   *
   * @param fs a file system for the Path
   * @param tableDir Path to a specific table directory, &lt;hbase.rootdir&gt;/&lt;tabledir&gt;
   * @return list of paths to valid region directories in the table dir
   * @throws IOException e
   */
  public static List<Path> getRegionDirs(final FileSystem fs, final Path tableDir)
      throws IOException {
    FileStatus[] rds = fs.listStatus(tableDir, new RegionDirFilter(fs));
    List<Path> regionDirs = new ArrayList<Path>(rds.length);
    for (FileStatus rdfs : rds) {
      Path rdPath = rdfs.getPath();
      regionDirs.add(rdPath);
    }
    return regionDirs;
  }
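
  /**
   * A {@link PathFilter} that returns only directories whose names are legal
   * column family names.
   */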
  public static class FamilyDirFilter implements PathFilter {
    final FileSystem fs;

    public FamilyDirFilter(FileSystem fs) {
      this.fs = fs;
    }

    @Override
    public boolean accept(Path rd) {
      try {
        // throws IAE if invalid
        HColumnDescriptor.isLegalFamilyName(Bytes.toBytes(rd.getName()));
      } catch (IllegalArgumentException iae) {
        // path name is an invalid family name and thus is excluded.
        return false;
      }

      try {
        return fs.getFileStatus(rd).isDir();
      } catch (IOException ioe) {
        // Maybe the file was moved or the fs was disconnected.
        LOG.warn("Skipping file " + rd + " due to IOException", ioe);
        return false;
      }
    }
  }

  /**
   * Given a particular region dir, return all the family dirs inside it.
   *
   * @param fs a file system for the Path
   * @param regionDir Path to a specific region directory
   * @return list of paths to valid family directories in the region dir
   * @throws IOException e
   */
  public static List<Path> getFamilyDirs(final FileSystem fs, final Path regionDir)
      throws IOException {
    FileStatus[] fds = fs.listStatus(regionDir, new FamilyDirFilter(fs));
    List<Path> familyDirs = new ArrayList<Path>(fds.length);
    for (FileStatus fdfs : fds) {
      Path fdPath = fdfs.getPath();
      familyDirs.add(fdPath);
    }
    return familyDirs;
  }
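
  /**
   * A {@link PathFilter} that returns only files whose names look like hfiles,
   * i.e. hex-encoded names.
   */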
  public static class HFileFilter implements PathFilter {
    final public static Pattern hfilePattern = Pattern.compile("^([0-9a-f]+)$");

    final FileSystem fs;

    public HFileFilter(FileSystem fs) {
      this.fs = fs;
    }

    @Override
    public boolean accept(Path rd) {
      if (!hfilePattern.matcher(rd.getName()).matches()) {
        return false;
      }

      try {
        return !fs.getFileStatus(rd).isDir();
      } catch (IOException ioe) {
        LOG.warn("Skipping file " + rd + " due to IOException", ioe);
        return false;
      }
    }
  }

  public static FileSystem getCurrentFileSystem(Configuration conf)
      throws IOException {
    return getRootDir(conf).getFileSystem(conf);
  }
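
  /**
   * Runs through the HBase rootdir and creates a reverse lookup map from
   * StoreFile name to its full Path.
   *
   * @param fs the file system to use
   * @param hbaseRootDir the root directory to scan
   * @return map keyed by StoreFile name with a value of the full Path
   * @throws IOException when scanning the directory fails
   */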
  public static Map<String, Path> getTableStoreFilePathMap(
      final FileSystem fs, final Path hbaseRootDir)
      throws IOException {
    Map<String, Path> map = new HashMap<String, Path>();

    DirFilter df = new DirFilter(fs);

    FileStatus[] tableDirs = fs.listStatus(hbaseRootDir, df);
    for (FileStatus tableDir : tableDirs) {
      Path d = tableDir.getPath();
      if (HConstants.HBASE_NON_TABLE_DIRS.contains(d.getName())) {
        continue;
      }
      FileStatus[] regionDirs = fs.listStatus(d, df);
      for (FileStatus regionDir : regionDirs) {
        Path dd = regionDir.getPath();
        if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
          continue;
        }
        FileStatus[] familyDirs = fs.listStatus(dd, df);
        for (FileStatus familyDir : familyDirs) {
          Path family = familyDir.getPath();
          FileStatus[] familyStatus = fs.listStatus(family);
          for (FileStatus sfStatus : familyStatus) {
            Path sf = sfStatus.getPath();
            map.put(sf.getName(), sf);
          }
        }
      }
    }
    return map;
  }
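
  /**
   * Calls fs.listStatus() and treats FileNotFoundException as non-fatal. This
   * accommodates differences between Hadoop versions: Hadoop 1 returns an empty
   * FileStatus[] for a missing directory, while Hadoop 2 throws
   * FileNotFoundException.
   *
   * @param fs file system
   * @param dir directory
   * @param filter path filter, may be null
   * @return null if dir is empty or doesn't exist, otherwise a FileStatus array
   * @throws IOException e
   */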
  public static FileStatus[] listStatus(final FileSystem fs,
      final Path dir, final PathFilter filter) throws IOException {
    FileStatus[] status = null;
    try {
      status = filter == null ? fs.listStatus(dir) : fs.listStatus(dir, filter);
    } catch (FileNotFoundException fnfe) {
      LOG.debug(dir + " doesn't exist");
    }
    if (status == null || status.length < 1) return null;
    return status;
  }

  public static FileStatus[] listStatus(final FileSystem fs, final Path dir) throws IOException {
    return listStatus(fs, dir, null);
  }

  public static boolean delete(final FileSystem fs, final Path path, final boolean recursive)
      throws IOException {
    return fs.delete(path, recursive);
  }

  public static boolean isExists(final FileSystem fs, final Path path) throws IOException {
    return fs.exists(path);
  }
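
  /**
   * Throw an AccessControlException if the given action is not permitted for
   * the user on the file, checking owner, group, and other permissions.
   *
   * @param ugi the user
   * @param file the file to check
   * @param action the action being requested
   * @throws AccessControlException if the action is not permitted
   */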
  public static void checkAccess(UserGroupInformation ugi, FileStatus file,
      FsAction action) throws AccessControlException {
    if (ugi.getShortUserName().equals(file.getOwner())) {
      if (file.getPermission().getUserAction().implies(action)) {
        return;
      }
    } else if (contains(ugi.getGroupNames(), file.getGroup())) {
      if (file.getPermission().getGroupAction().implies(action)) {
        return;
      }
    } else if (file.getPermission().getOtherAction().implies(action)) {
      return;
    }
    throw new AccessControlException("Permission denied:" + " action=" + action
        + " path=" + file.getPath() + " user=" + ugi.getShortUserName());
  }

  private static boolean contains(String[] groups, String user) {
    for (String group : groups) {
      if (group.equals(user)) {
        return true;
      }
    }
    return false;
  }
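
  /**
   * Log the state of the filesystem below <code>root</code>, one line per file
   * or directory, at DEBUG level.
   *
   * @param fs filesystem to investigate
   * @param root root file/directory to start logging from
   * @param LOG log to output information to
   * @throws IOException if an unexpected exception occurs
   */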
  public static void logFileSystemState(final FileSystem fs, final Path root, Log LOG)
      throws IOException {
    LOG.debug("Current file system:");
    logFSTree(LOG, fs, root, "|-");
  }

  /**
   * Recursive helper to log the state of the FS.
   *
   * @see #logFileSystemState(FileSystem, Path, Log)
   */
  private static void logFSTree(Log LOG, final FileSystem fs, final Path root, String prefix)
      throws IOException {
    FileStatus[] files = FSUtils.listStatus(fs, root, null);
    if (files == null) return;

    for (FileStatus file : files) {
      if (file.isDir()) {
        LOG.debug(prefix + file.getPath().getName() + "/");
        logFSTree(LOG, fs, file.getPath(), prefix + "---");
      } else {
        LOG.debug(prefix + file.getPath().getName());
      }
    }
  }
}