1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20 package org.apache.hadoop.hbase.util;
21
import java.io.IOException;
import java.net.URI;
import java.util.Locale;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.io.compress.Compressor;
33
34
35
36
37
38 public class CompressionTest {
39 static final Log LOG = LogFactory.getLog(CompressionTest.class);
40
41 public static boolean testCompression(String codec) {
42 codec = codec.toLowerCase();
43
44 Compression.Algorithm a;
45
46 try {
47 a = Compression.getCompressionAlgorithmByName(codec);
48 } catch (IllegalArgumentException e) {
49 LOG.warn("Codec type: " + codec + " is not known");
50 return false;
51 }
52
53 try {
54 testCompression(a);
55 return true;
56 } catch (IOException ignored) {
57 LOG.warn("Can't instantiate codec: " + codec, ignored);
58 return false;
59 }
60 }
61
62 private final static Boolean[] compressionTestResults
63 = new Boolean[Compression.Algorithm.values().length];
64 static {
65 for (int i = 0 ; i < compressionTestResults.length ; ++i) {
66 compressionTestResults[i] = null;
67 }
68 }
69
70 public static void testCompression(Compression.Algorithm algo)
71 throws IOException {
72 if (compressionTestResults[algo.ordinal()] != null) {
73 if (compressionTestResults[algo.ordinal()]) {
74 return ;
75 } else {
76
77 throw new IOException("Compression algorithm '" + algo.getName() + "'" +
78 " previously failed test.");
79 }
80 }
81
82 try {
83 Compressor c = algo.getCompressor();
84 algo.returnCompressor(c);
85 compressionTestResults[algo.ordinal()] = true;
86 } catch (Throwable t) {
87 compressionTestResults[algo.ordinal()] = false;
88 throw new IOException(t);
89 }
90 }
91
92 protected static Path path = new Path(".hfile-comp-test");
93
94 public static void usage() {
95 System.err.println("Usage: CompressionTest HDFS_PATH none|gz|lzo");
96 System.exit(1);
97 }
98
99 protected static DistributedFileSystem openConnection(String urlString)
100 throws java.net.URISyntaxException, java.io.IOException {
101 URI dfsUri = new URI(urlString);
102 Configuration dfsConf = new Configuration();
103 DistributedFileSystem dfs = new DistributedFileSystem();
104 dfs.initialize(dfsUri, dfsConf);
105 return dfs;
106 }
107
108 protected static boolean closeConnection(DistributedFileSystem dfs) {
109 if (dfs != null) {
110 try {
111 dfs.close();
112 } catch (Exception e) {
113 e.printStackTrace();
114 }
115 }
116 return dfs == null;
117 }
118
119 public static void main(String[] args) {
120 if (args.length != 2) usage();
121 try {
122 DistributedFileSystem dfs = openConnection(args[0]);
123 dfs.delete(path, false);
124 HFile.Writer writer = new HFile.Writer(dfs, path,
125 HFile.DEFAULT_BLOCKSIZE, args[1], null);
126 writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
127 writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
128 writer.close();
129
130 HFile.Reader reader = new HFile.Reader(dfs, path, null, false);
131 reader.loadFileInfo();
132 byte[] key = reader.getFirstKey();
133 boolean rc = Bytes.toString(key).equals("testkey");
134 reader.close();
135
136 dfs.delete(path, false);
137 closeConnection(dfs);
138
139 if (rc) System.exit(0);
140 } catch (Exception e) {
141 e.printStackTrace();
142 }
143 System.out.println("FAILED");
144 System.exit(1);
145 }
146 }