package org.apache.hadoop.hbase.util;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.io.compress.Compressor;

import java.io.IOException;
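/**
 * Compression validation test: verifies that a compression codec can actually be
 * loaded and used on this node, e.g. before enabling it on a table. Run from the
 * command line as:
 *
 * <pre>
 *   hbase org.apache.hadoop.hbase.util.CompressionTest file:///tmp/testfile gz
 * </pre>
 */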
public class CompressionTest {
  static final Log LOG = LogFactory.getLog(CompressionTest.class);

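  /**
   * Returns true if the named codec (e.g. "none", "gz" or "lzo", case-insensitive)
   * can be resolved and instantiated; unknown or unloadable codecs are logged and
   * reported as false instead of being thrown to the caller.
   */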
  public static boolean testCompression(String codec) {
    codec = codec.toLowerCase();

    Compression.Algorithm a;

    try {
      a = Compression.getCompressionAlgorithmByName(codec);
    } catch (IllegalArgumentException e) {
      LOG.warn("Codec type: " + codec + " is not known");
      return false;
    }

    try {
      testCompression(a);
      return true;
    } catch (IOException ignored) {
      LOG.warn("Can't instantiate codec: " + codec, ignored);
      return false;
    }
  }

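  // One slot per Compression.Algorithm, indexed by ordinal: null = not yet tested,
  // TRUE = codec loaded successfully, FALSE = a previous attempt failed.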
  private final static Boolean[] compressionTestResults
      = new Boolean[Compression.Algorithm.values().length];
  static {
    for (int i = 0; i < compressionTestResults.length; ++i) {
      compressionTestResults[i] = null;
    }
  }

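  /**
   * Tries to obtain (and immediately return) a compressor for the given algorithm,
   * caching the outcome so each codec is only probed once per JVM. An algorithm
   * that previously failed fails fast again.
   *
   * @throws IOException if the codec cannot be instantiated
   */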
  public static void testCompression(Compression.Algorithm algo)
      throws IOException {
    if (compressionTestResults[algo.ordinal()] != null) {
      if (compressionTestResults[algo.ordinal()]) {
        return;
      } else {
        throw new IOException("Compression algorithm '" + algo.getName() + "'" +
            " previously failed test.");
      }
    }

    try {
      Compressor c = algo.getCompressor();
      algo.returnCompressor(c);
      compressionTestResults[algo.ordinal()] = true;
    } catch (Throwable t) {
      compressionTestResults[algo.ordinal()] = false;
      throw new IOException(t);
    }
  }

  protected static Path path = new Path(".hfile-comp-test");

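  /** Prints command-line usage to stderr. */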
  public static void usage() {
    System.err.println(
      "Usage: CompressionTest <path> none|gz|lzo\n" +
      "\n" +
      "For example:\n" +
      "  hbase " + CompressionTest.class.getName() + " file:///tmp/testfile gz\n");
  }

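  /**
   * Writes a single test key/value into an HFile at the given path using the
   * requested codec, then reads the file back and checks that the first key
   * matches what was written.
   *
   * @throws Exception if the write, the read, or the verification fails
   */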
  public static void doSmokeTest(FileSystem fs, Path path, String codec)
      throws Exception {
    HFile.Writer writer = new HFile.Writer(
        fs, path, HFile.DEFAULT_BLOCKSIZE, codec, null);
    writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
    writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
    writer.close();

    HFile.Reader reader = new HFile.Reader(fs, path, null, false);
    reader.loadFileInfo();
    byte[] key = reader.getFirstKey();
    boolean rc = Bytes.toString(key).equals("testkey");
    reader.close();

    if (!rc) {
      throw new Exception("Read back incorrect result: " +
          Bytes.toStringBinary(key));
    }
  }

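  /**
   * Command-line entry point: expects a file path and a codec name (see
   * {@link #usage()}), runs {@link #doSmokeTest}, deletes the scratch file,
   * and prints SUCCESS on a clean round trip.
   */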
  public static void main(String[] args) throws Exception {
    if (args.length != 2) {
      usage();
      System.exit(1);
    }

    Configuration conf = new Configuration();
    Path path = new Path(args[0]);
    FileSystem fs = path.getFileSystem(conf);
    try {
      doSmokeTest(fs, path, args[1]);
    } finally {
      fs.delete(path, false);
    }
    System.out.println("SUCCESS");
  }
}