/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hdfs.DistributedFileSystem;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
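
/**
 * Compression codec smoke test: writes a one-row HFile with the requested
 * codec, reads it back, and exits 0 only if the round trip succeeds.
 * Handy for checking that a codec (and, in the case of lzo, its native
 * libraries) is usable on a given node.  A typical invocation, with an
 * illustrative NameNode URI and the HBase and Hadoop jars on the
 * classpath, looks like:
 *
 * <pre>
 *   java org.apache.hadoop.hbase.util.CompressionTest hdfs://namenode:8020/ gz
 * </pre>
 */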
public class CompressionTest {
  /** Scratch HFile written and then re-read to verify the codec. */
  protected static Path path = new Path(".hfile-comp-test");

  public static void usage() {
    System.err.println("Usage: CompressionTest HDFS_PATH none|gz|lzo");
    System.exit(1);
  }

  /**
   * Connects to the HDFS instance named by <code>urlString</code>.
   */
  protected static DistributedFileSystem openConnection(String urlString)
      throws URISyntaxException, IOException {
    URI dfsUri = new URI(urlString);
    Configuration dfsConf = new Configuration();
    DistributedFileSystem dfs = new DistributedFileSystem();
    dfs.initialize(dfsUri, dfsConf);
    return dfs;
  }

  /**
   * Closes the filesystem; returns true if the close succeeded.
   */
  protected static boolean closeConnection(DistributedFileSystem dfs) {
    if (dfs == null) return true;
    try {
      dfs.close();
      return true;
    } catch (IOException e) {
      e.printStackTrace();
      return false;
    }
  }

  public static void main(String[] args) {
    if (args.length != 2) usage();
    try {
      DistributedFileSystem dfs = openConnection(args[0]);
      // Remove any scratch file left over from a previous run.
      dfs.delete(path, false);

      // Write a single key/value with the requested compression codec.
      HFile.Writer writer = new HFile.Writer(dfs, path,
          HFile.DEFAULT_BLOCKSIZE, args[1], null);
      writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
      writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
      writer.close();

      // Read the file back and check that the key decompressed intact.
      HFile.Reader reader = new HFile.Reader(dfs, path, null, false);
      reader.loadFileInfo();
      byte[] key = reader.getFirstKey();
      boolean rc = Bytes.toString(key).equals("testkey");
      reader.close();

      dfs.delete(path, false);
      closeConnection(dfs);

      // Exit 0 on success; otherwise fall through to the failure path.
      if (rc) System.exit(0);
    } catch (Exception e) {
      e.printStackTrace();
    }
    System.out.println("FAILED");
    System.exit(1);
  }
}