1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19 package org.apache.hadoop.hbase.util;
20
import java.awt.*;
import java.io.IOException;
import java.util.Arrays;
import java.util.Locale;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.io.compress.Compressor;
38
39
40
41
42
43 @InterfaceAudience.Public
44 @InterfaceStability.Evolving
45 public class CompressionTest {
46 static final Log LOG = LogFactory.getLog(CompressionTest.class);
47
48 public static boolean testCompression(String codec) {
49 codec = codec.toLowerCase();
50
51 Compression.Algorithm a;
52
53 try {
54 a = Compression.getCompressionAlgorithmByName(codec);
55 } catch (IllegalArgumentException e) {
56 LOG.warn("Codec type: " + codec + " is not known");
57 return false;
58 }
59
60 try {
61 testCompression(a);
62 return true;
63 } catch (IOException ignored) {
64 LOG.warn("Can't instantiate codec: " + codec, ignored);
65 return false;
66 }
67 }
68
69 private final static Boolean[] compressionTestResults
70 = new Boolean[Compression.Algorithm.values().length];
71 static {
72 for (int i = 0 ; i < compressionTestResults.length ; ++i) {
73 compressionTestResults[i] = null;
74 }
75 }
76
77 public static void testCompression(Compression.Algorithm algo)
78 throws IOException {
79 if (compressionTestResults[algo.ordinal()] != null) {
80 if (compressionTestResults[algo.ordinal()]) {
81 return ;
82 } else {
83
84 throw new IOException("Compression algorithm '" + algo.getName() + "'" +
85 " previously failed test.");
86 }
87 }
88
89 try {
90 Compressor c = algo.getCompressor();
91 algo.returnCompressor(c);
92 compressionTestResults[algo.ordinal()] = true;
93 } catch (Throwable t) {
94 compressionTestResults[algo.ordinal()] = false;
95 throw new IOException(t);
96 }
97 }
98
99 protected static Path path = new Path(".hfile-comp-test");
100
101 public static void usage() {
102
103 System.err.println(
104 "Usage: CompressionTest <path> " +
105 StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase() +
106 "\n" +
107 "For example:\n" +
108 " hbase " + CompressionTest.class + " file:///tmp/testfile gz\n");
109 System.exit(1);
110 }
111
112 public static void doSmokeTest(FileSystem fs, Path path, String codec)
113 throws Exception {
114 Configuration conf = HBaseConfiguration.create();
115 HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
116 .withPath(fs, path)
117 .withCompression(codec)
118 .create();
119 writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
120 writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
121 writer.close();
122
123 HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
124 reader.loadFileInfo();
125 byte[] key = reader.getFirstKey();
126 boolean rc = Bytes.toString(key).equals("testkey");
127 reader.close();
128
129 if (!rc) {
130 throw new Exception("Read back incorrect result: " +
131 Bytes.toStringBinary(key));
132 }
133 }
134
135 public static void main(String[] args) throws Exception {
136 if (args.length != 2) {
137 usage();
138 System.exit(1);
139 }
140
141 Configuration conf = new Configuration();
142 Path path = new Path(args[0]);
143 FileSystem fs = path.getFileSystem(conf);
144 try {
145 doSmokeTest(fs, path, args[1]);
146 } finally {
147 fs.delete(path, false);
148 }
149 System.out.println("SUCCESS");
150 }
151 }