
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.protobuf.generated.VisibilityLabelsProtos.VisibilityLabelsResponse;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.visibility.Authorizations;
import org.apache.hadoop.hbase.security.visibility.ScanLabelGenerator;
import org.apache.hadoop.hbase.security.visibility.SimpleScanLabelGenerator;
import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
import org.apache.hadoop.hbase.security.visibility.VisibilityController;
import org.apache.hadoop.hbase.security.visibility.VisibilityUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

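/**
 * Verifies that ImportTsv can load TSV data carrying a cell visibility
 * expression (supplied through the HBASE_CELL_VISIBILITY column spec), both
 * when writing directly to a table and when producing HFiles for bulk load.
 */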
@Category(LargeTests.class)
public class TestImportTSVWithVisibilityLabels implements Configurable {

  protected static final Log LOG = LogFactory.getLog(TestImportTSVWithVisibilityLabels.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();

  /**
   * Delete the tmp directory after running doMROnTableTest. Boolean. Default is
   * true.
   */
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";

  /**
   * Force use of combiner in doMROnTableTest. Boolean. Default is true.
   */
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

  private final static String FAMILY = "FAM";
  private final static String TOPSECRET = "topsecret";
  private final static String PUBLIC = "public";
  private final static String PRIVATE = "private";
  private final static String CONFIDENTIAL = "confidential";
  private final static String SECRET = "secret";
  private static User SUPERUSER;
  private static Configuration conf;

  @Override
  public Configuration getConf() {
    return util.getConfiguration();
  }

  @Override
  public void setConf(Configuration conf) {
    throw new IllegalArgumentException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    conf = util.getConfiguration();
    SUPERUSER = User.createUserForTesting(conf, "admin", new String[] { "supergroup" });
    conf.set("hbase.superuser", "admin," + User.getCurrent().getName());
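    // Visibility labels are stored as cell tags, which require the HFile v3 format,
    // and label enforcement needs the VisibilityController coprocessor on both the
    // master and the region servers.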
    conf.setInt("hfile.format.version", 3);
    conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName());
    conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName());
    conf.setClass(VisibilityUtils.VISIBILITY_LABEL_GENERATOR_CLASS, SimpleScanLabelGenerator.class,
        ScanLabelGenerator.class);
    util.startMiniCluster();
    // Wait for the labels table to become available
    util.waitTableEnabled(VisibilityConstants.LABELS_TABLE_NAME.getName(), 50000);
    createLabels();
    util.startMiniMapReduceCluster();
  }

  private static void createLabels() throws IOException, InterruptedException {
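    // Label management is an administrative operation, so the addLabels RPC is
    // issued as the superuser.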
    PrivilegedExceptionAction<VisibilityLabelsResponse> action =
        new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
      @Override
      public VisibilityLabelsResponse run() throws Exception {
        String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE };
        try {
          VisibilityClient.addLabels(conf, labels);
          LOG.info("Added labels");
        } catch (Throwable t) {
          LOG.error("Error adding labels", t);
          throw new IOException(t);
        }
        return null;
      }
    };
    SUPERUSER.runAs(action);
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }

  @Test
  public void testMROnTable() throws Exception {
    String tableName = "test-" + UUID.randomUUID();

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
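    // \u001b (ESC) is the column separator; the last column in each line is the
    // cell visibility expression, mapped via the HBASE_CELL_VISIBILITY column spec.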
    String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
    util.createTable(tableName, FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(tableName);
  }

  @Test
  public void testMROnTableWithBulkload() throws Exception {
    String tableName = "test-" + UUID.randomUUID();
    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName), "hfiles");
    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
    String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
    util.createTable(tableName, FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(tableName);
  }

  @Test
  public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
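    // TsvImporterTextMapper emits each input line as Text; with a bulk output
    // directory configured, the TSV parsing is deferred to the reduce side
    // (TextSortReducer) rather than done in the mapper.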
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    // Prepare the arguments required for the test.
    String[] args =
        new String[] {
            "-D" + ImportTsv.MAPPER_CONF_KEY
                + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
            "-D" + ImportTsv.COLUMNS_CONF_KEY
                + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
            "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
            "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
            table };
    String data = "KEY\u001bVALUE4\u001bVALUE8\u001bsecret&private\n";
    doMROnTableTest(util, FAMILY, data, args, 4);
    util.deleteTable(table);
  }

  @Test
  public void testMRWithOutputFormat() throws Exception {
    String tableName = "test-" + UUID.randomUUID();
    Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName), "hfiles");
    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName };
    String data = "KEY\u001bVALUE4\u001bVALUE8\u001bsecret&private\n";
    util.createTable(tableName, FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(tableName);
  }

  /**
   * Run an ImportTsv job and perform basic validation on the results. Returns
   * the ImportTsv <code>Tool</code> instance so that other tests can inspect it
   * for further validation as necessary. This method is static to ensure it does
   * not rely on the instance's util/conf facilities.
   *
   * @param args
   *          Any arguments to pass BEFORE the input file path is appended; the
   *          last element must be the target table name.
   * @return The Tool instance used to run the test.
   */
  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family, String data,
      String[] args, int valueMultiplier) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

    // Populate the input file.
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    if (data == null) {
      data = "KEY\u001bVALUE1\u001bVALUE2\n";
    }
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

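    // "min.num.spills.for.combine" is the map-side threshold below which the
    // combiner is skipped at merge time; lowering it to 1 forces the combiner to
    // run even though this tiny input produces a single spill.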
    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
      conf.setInt("min.num.spills.for.combine", 1);
    }

    // Run the import.
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(new String[argv.size()])));

    // Perform basic validation. If the input args did not include
    // ImportTsv.BULK_OUTPUT_CONF_KEY then validate data in the table.
    // Otherwise, validate presence of hfiles.
    boolean createdHFiles = false;
    String outputPath = null;
    for (String arg : argv) {
      if (arg.contains(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
        createdHFiles = true;
        // split '-Dfoo=bar' on '=' and keep 'bar'
        outputPath = arg.split("=")[1];
        break;
      }
    }
    LOG.debug("Validating output; HFiles created: " + createdHFiles);
    if (createdHFiles) {
      validateHFiles(fs, outputPath, family);
    } else {
      validateTable(conf, table, family, valueMultiplier);
    }

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

  /**
   * Confirm ImportTsv output by inspecting the HFiles it wrote to the filesystem.
   */
  private static void validateHFiles(FileSystem fs, String outputPath, String family)
      throws IOException {

    // Validate the number and content of the output column families.
    LOG.debug("Validating HFiles.");
    Set<String> configFamilies = new HashSet<String>();
    configFamilies.add(family);
    Set<String> foundFamilies = new HashSet<String>();
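    // OutputFilesFilter skips MapReduce bookkeeping files such as _SUCCESS,
    // leaving only the per-column-family output directories.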
    for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
      LOG.debug("Found output entry: " + cfStatus.getPath());
      String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
      String cf = elements[elements.length - 1];
      foundFamilies.add(cf);
      assertTrue(String.format(
          "HFile output contains a column family (%s) not present in input families (%s)", cf,
          configFamilies), configFamilies.contains(cf));
      for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
        assertTrue(String.format("HFile %s appears to contain no data.", hfile.getPath()),
            hfile.getLen() > 0);
      }
    }
  }

  /**
   * Confirm ImportTsv output by scanning the data in the online table.
   */
  private static void validateTable(Configuration conf, String tableName, String family,
      int valueMultiplier) throws IOException {

    LOG.debug("Validating table.");
    HTable table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
        // Scan the entire family.
        scan.addFamily(Bytes.toBytes(family));
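        // The imported cells carry the visibility expression "secret&private", so
        // the scan must be authorized for both labels in order to see them.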
        scan.setAuthorizations(new Authorizations(SECRET, PRIVATE));
        ResultScanner resScanner = table.getScanner(scan);
        Result[] next = resScanner.next(5);
        assertEquals(1, next.length);
        for (Result res : next) {
          LOG.debug("Getting results " + res.size());
          assertEquals(2, res.size());
          List<Cell> kvs = res.listCells();
          assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
          assertTrue(CellUtil.matchingValue(kvs.get(1),
              Bytes.toBytes("VALUE" + 2 * valueMultiplier)));
          // Only one row is expected, so this loop runs exactly once.
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
        // If here, a cell was empty. Presume it's because updates came in
        // after the scanner had been opened. Wait a while and retry.
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // continue
      }
    }
    table.close();
    assertTrue(verified);
  }

}