/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
19 package org.apache.hadoop.hbase.util;
20
import java.io.IOException;
import java.util.Locale;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.io.compress.Compressor;
35
36
37
38
39
40 @InterfaceAudience.Public
41 @InterfaceStability.Evolving
42 public class CompressionTest {
43 static final Log LOG = LogFactory.getLog(CompressionTest.class);
44
45 public static boolean testCompression(String codec) {
46 codec = codec.toLowerCase();
47
48 Compression.Algorithm a;
49
50 try {
51 a = Compression.getCompressionAlgorithmByName(codec);
52 } catch (IllegalArgumentException e) {
53 LOG.warn("Codec type: " + codec + " is not known");
54 return false;
55 }
56
57 try {
58 testCompression(a);
59 return true;
60 } catch (IOException ignored) {
61 LOG.warn("Can't instantiate codec: " + codec, ignored);
62 return false;
63 }
64 }
65
66 private final static Boolean[] compressionTestResults
67 = new Boolean[Compression.Algorithm.values().length];
68 static {
69 for (int i = 0 ; i < compressionTestResults.length ; ++i) {
70 compressionTestResults[i] = null;
71 }
72 }
73
74 public static void testCompression(Compression.Algorithm algo)
75 throws IOException {
76 if (compressionTestResults[algo.ordinal()] != null) {
77 if (compressionTestResults[algo.ordinal()]) {
78 return ;
79 } else {
80
81 throw new IOException("Compression algorithm '" + algo.getName() + "'" +
82 " previously failed test.");
83 }
84 }
85
86 try {
87 Compressor c = algo.getCompressor();
88 algo.returnCompressor(c);
89 compressionTestResults[algo.ordinal()] = true;
90 } catch (Throwable t) {
91 compressionTestResults[algo.ordinal()] = false;
92 throw new IOException(t);
93 }
94 }
95
96 protected static Path path = new Path(".hfile-comp-test");
97
98 public static void usage() {
99 System.err.println(
100 "Usage: CompressionTest <path> none|gz|lzo|snappy\n" +
101 "\n" +
102 "For example:\n" +
103 " hbase " + CompressionTest.class + " file:///tmp/testfile gz\n");
104 System.exit(1);
105 }
106
107 public static void doSmokeTest(FileSystem fs, Path path, String codec)
108 throws Exception {
109 Configuration conf = HBaseConfiguration.create();
110 HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
111 .withPath(fs, path)
112 .withCompression(codec)
113 .create();
114 writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
115 writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
116 writer.close();
117
118 HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
119 reader.loadFileInfo();
120 byte[] key = reader.getFirstKey();
121 boolean rc = Bytes.toString(key).equals("testkey");
122 reader.close();
123
124 if (!rc) {
125 throw new Exception("Read back incorrect result: " +
126 Bytes.toStringBinary(key));
127 }
128 }
129
130 public static void main(String[] args) throws Exception {
131 if (args.length != 2) {
132 usage();
133 System.exit(1);
134 }
135
136 Configuration conf = new Configuration();
137 Path path = new Path(args[0]);
138 FileSystem fs = path.getFileSystem(conf);
139 try {
140 doSmokeTest(fs, path, args[1]);
141 } finally {
142 fs.delete(path, false);
143 }
144 System.out.println("SUCCESS");
145 }
146 }