View Javadoc

1   /**
2    * Licensed to the Apache Software Foundation (ASF) under one
3    * or more contributor license agreements.  See the NOTICE file
4    * distributed with this work for additional information
5    * regarding copyright ownership.  The ASF licenses this file
6    * to you under the Apache License, Version 2.0 (the
7    * "License"); you may not use this file except in compliance
8    * with the License.  You may obtain a copy of the License at
9    *
10   *     http://www.apache.org/licenses/LICENSE-2.0
11   *
12   * Unless required by applicable law or agreed to in writing, software
13   * distributed under the License is distributed on an "AS IS" BASIS,
14   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15   * See the License for the specific language governing permissions and
16   * limitations under the License.
17   */
18  package org.apache.hadoop.hbase.test;
19  
20  import java.io.IOException;
21  import java.security.PrivilegedExceptionAction;
22  import java.util.Arrays;
23  import java.util.Iterator;
24  import java.util.UUID;
25  
26  import org.apache.commons.cli.CommandLine;
27  import org.apache.commons.logging.Log;
28  import org.apache.commons.logging.LogFactory;
29  import org.apache.hadoop.conf.Configuration;
30  import org.apache.hadoop.conf.Configured;
31  import org.apache.hadoop.fs.Path;
32  import org.apache.hadoop.hbase.Cell;
33  import org.apache.hadoop.hbase.HBaseConfiguration;
34  import org.apache.hadoop.hbase.HColumnDescriptor;
35  import org.apache.hadoop.hbase.HRegionLocation;
36  import org.apache.hadoop.hbase.HTableDescriptor;
37  import org.apache.hadoop.hbase.IntegrationTestingUtility;
38  import org.apache.hadoop.hbase.testclassification.IntegrationTests;
39  import org.apache.hadoop.hbase.TableName;
40  import org.apache.hadoop.hbase.chaos.factories.MonkeyFactory;
41  import org.apache.hadoop.hbase.client.Admin;
42  import org.apache.hadoop.hbase.client.BufferedMutator;
43  import org.apache.hadoop.hbase.client.BufferedMutatorParams;
44  import org.apache.hadoop.hbase.client.Delete;
45  import org.apache.hadoop.hbase.client.HBaseAdmin;
46  import org.apache.hadoop.hbase.client.HConnection;
47  import org.apache.hadoop.hbase.client.HConnectionManager;
48  import org.apache.hadoop.hbase.client.Put;
49  import org.apache.hadoop.hbase.client.Result;
50  import org.apache.hadoop.hbase.client.Scan;
51  import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
52  import org.apache.hadoop.hbase.io.hfile.HFile;
53  import org.apache.hadoop.hbase.mapreduce.Import;
54  import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
55  import org.apache.hadoop.hbase.security.User;
56  import org.apache.hadoop.hbase.security.access.AccessControlClient;
57  import org.apache.hadoop.hbase.security.access.Permission;
58  import org.apache.hadoop.hbase.security.visibility.Authorizations;
59  import org.apache.hadoop.hbase.security.visibility.CellVisibility;
60  import org.apache.hadoop.hbase.security.visibility.VisibilityClient;
61  import org.apache.hadoop.hbase.security.visibility.VisibilityController;
62  import org.apache.hadoop.hbase.util.AbstractHBaseTool;
63  import org.apache.hadoop.hbase.util.Bytes;
64  import org.apache.hadoop.io.BytesWritable;
65  import org.apache.hadoop.mapreduce.Counter;
66  import org.apache.hadoop.mapreduce.CounterGroup;
67  import org.apache.hadoop.mapreduce.Counters;
68  import org.apache.hadoop.mapreduce.Job;
69  import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
70  import org.apache.hadoop.util.Tool;
71  import org.apache.hadoop.util.ToolRunner;
72  import org.junit.Test;
73  import org.junit.experimental.categories.Category;
74  
75  /**
76   * IT test used to verify the deletes with visibility labels.
77   * The test creates three tables tablename_0, tablename_1 and tablename_2 and each table 
78   * is associated with a unique pair of labels.
79   * Another common table with the name 'commontable' is created and it has the data combined 
80   * from all these 3 tables such that there are 3 versions of every row but the visibility label 
81   * in every row corresponds to the table from which the row originated.  
82   * Then deletes are issued to the common table by selecting the visibility label 
83   * associated with each of the smaller tables. 
84   * After the delete is issued with one set of visibility labels we try to scan the common table 
85   * with each of the visibility pairs defined for the 3 tables.  
86   * So after the first delete is issued, a scan with the first set of visibility labels would 
87   * return zero result whereas the scan issued with the other two sets of visibility labels 
 * should return all the rows corresponding to that set of visibility labels.  This process of
 * delete and scan is repeated until the last set of visibility labels has been used for the
 * deletes, after which the common table should not return any rows.
91   * 
92   * To use this 
93   * ./hbase org.apache.hadoop.hbase.test.IntegrationTestBigLinkedListWithVisibility Loop 1 1 20000 /tmp 1 10000
94   * or 
95   * ./hbase org.apache.hadoop.hbase.IntegrationTestsDriver -r .*IntegrationTestBigLinkedListWithVisibility.*
96   */
97  @Category(IntegrationTests.class)
98  public class IntegrationTestBigLinkedListWithVisibility extends IntegrationTestBigLinkedList {
99  
  // Visibility label names; table i "owns" the label pair at positions (2i, 2i+1) of the
  // comma-separated 'labels' list below.
  private static final String CONFIDENTIAL = "confidential";
  private static final String TOPSECRET = "topsecret";
  private static final String SECRET = "secret";
  private static final String PUBLIC = "public";
  private static final String PRIVATE = "private";
  private static final String EVERYONE = "everyone";
  private static final String RESTRICTED = "restricted";
  private static final String GROUP = "group";
  // NOTE(review): "previliged" is a typo for "privileged", but the literal is a runtime label
  // value shared between the generator and verifier jobs, so it must stay as-is.
  private static final String PREVILIGED = "previliged";
  private static final String OPEN = "open";
  // Full label list; handed to the copy/delete mappers through the LABELS_KEY job-conf entry.
  public static String labels = CONFIDENTIAL + "," + TOPSECRET + "," + SECRET + "," + RESTRICTED
      + "," + PRIVATE + "," + PREVILIGED + "," + GROUP + "," + OPEN + "," + PUBLIC + "," + EVERYONE;
  private static final String COMMA = ",";
  private static final String UNDER_SCORE = "_";
  // Number of per-label tables (tableName_0 .. tableName_2).
  public static int DEFAULT_TABLES_COUNT = 3;
  // Prefix used to build the per-label table names, see getTableName(int).
  public static String tableName = "tableName";
  // Table holding the merged copies of all per-label tables (keeps up to 3 versions per row).
  public static final String COMMON_TABLE_NAME = "commontable";
  // Job-conf keys carrying the label list and the source table's label index to the mappers.
  public static final String LABELS_KEY = "LABELS";
  public static final String INDEX_KEY = "INDEX";
  // Restricted user that verification scans run as; created in setUpCluster().
  private static User USER;
  // Separator used when OR-ing two labels into one visibility expression.
  private static final String OR = "|";
  private static String USER_OPT = "user";
  private static String userName = "user1";
123 
124   static class VisibilityGenerator extends Generator {
125     private static final Log LOG = LogFactory.getLog(VisibilityGenerator.class);
126 
127     @Override
128     protected void createSchema() throws IOException {
129       LOG.info("Creating tables");
130       // Create three tables
131       boolean acl = AccessControlClient.isAccessControllerRunning(getConf());
132       if(!acl) {
133         LOG.info("No ACL available.");
134       }
135       Admin admin = new HBaseAdmin(getConf());
136       for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
137         TableName tableName = IntegrationTestBigLinkedListWithVisibility.getTableName(i);
138         createTable(admin, tableName, false, acl);
139       }
140       TableName tableName = TableName.valueOf(COMMON_TABLE_NAME);
141       createTable(admin, tableName, true, acl);
142       admin.close();
143     }
144 
145     private void createTable(Admin admin, TableName tableName, boolean setVersion,
146         boolean acl) throws IOException {
147       if (!admin.tableExists(tableName)) {
148         HTableDescriptor htd = new HTableDescriptor(tableName);
149         HColumnDescriptor family = new HColumnDescriptor(FAMILY_NAME);
150         if (setVersion) {
151           family.setMaxVersions(DEFAULT_TABLES_COUNT);
152         }
153         htd.addFamily(family);
154         admin.createTable(htd);
155         if (acl) {
156           LOG.info("Granting permissions for user " + USER.getShortName());
157           Permission.Action[] actions = { Permission.Action.READ };
158           try {
159             AccessControlClient.grant(getConf(), tableName, USER.getShortName(), null, null,
160                 actions);
161           } catch (Throwable e) {
162             LOG.fatal("Error in granting permission for the user " + USER.getShortName(), e);
163             throw new IOException(e);
164           }
165         }
166       }
167     }
168 
169     @Override
170     protected void setMapperForGenerator(Job job) {
171       job.setMapperClass(VisibilityGeneratorMapper.class);
172     }
173 
174     static class VisibilityGeneratorMapper extends GeneratorMapper {
175       BufferedMutator[] tables = new BufferedMutator[DEFAULT_TABLES_COUNT];
176 
177       @Override
178       protected void setup(org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException,
179           InterruptedException {
180         super.setup(context);
181       }
182 
183       @Override
184       protected void instantiateHTable() throws IOException {
185         for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
186           BufferedMutatorParams params = new BufferedMutatorParams(getTableName(i));
187           params.writeBufferSize(4 * 1024 * 1024);
188           BufferedMutator table = connection.getBufferedMutator(params);
189           this.tables[i] = table;
190         }
191       }
192 
193       @Override
194       protected void cleanup(org.apache.hadoop.mapreduce.Mapper.Context context)
195           throws IOException, InterruptedException {
196         for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
197           if (tables[i] != null) {
198             tables[i].close();
199           }
200         }
201       }
202 
203       @Override
204       protected void persist(org.apache.hadoop.mapreduce.Mapper.Context output, long count,
205           byte[][] prev, byte[][] current, byte[] id) throws IOException {
206         String visibilityExps = "";
207         String[] split = labels.split(COMMA);
208         for (int i = 0; i < current.length; i++) {
209           for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
210             Put put = new Put(current[i]);
211             put.add(FAMILY_NAME, COLUMN_PREV, prev == null ? NO_KEY : prev[i]);
212             
213             if (count >= 0) {
214               put.add(FAMILY_NAME, COLUMN_COUNT, Bytes.toBytes(count + i));
215             }
216             if (id != null) {
217               put.add(FAMILY_NAME, COLUMN_CLIENT, id);
218             }
219             visibilityExps = split[j * 2] + OR + split[(j * 2) + 1];
220             put.setCellVisibility(new CellVisibility(visibilityExps));
221             tables[j].mutate(put);
222             try {
223               Thread.sleep(1);
224             } catch (InterruptedException e) {
225               throw new IOException();
226             }
227           }
228           if (i % 1000 == 0) {
229             // Tickle progress every so often else maprunner will think us hung
230             output.progress();
231           }
232         }
233       }
234     }
235   }
236 
237   static class Copier extends Configured implements Tool {
238     private static final Log LOG = LogFactory.getLog(Copier.class);
239     private TableName tableName;
240     private int labelIndex;
241     private boolean delete;
242 
243     public Copier(TableName tableName, int index, boolean delete) {
244       this.tableName = tableName;
245       this.labelIndex = index;
246       this.delete = delete;
247     }
248 
249     public int runCopier(String outputDir) throws Exception {
250       Job job = null;
251       Scan scan = null;
252       job = new Job(getConf());
253       job.setJobName("Data copier");
254       job.getConfiguration().setInt("INDEX", labelIndex);
255       job.getConfiguration().set("LABELS", labels);
256       job.setJarByClass(getClass());
257       scan = new Scan();
258       scan.setCacheBlocks(false);
259       scan.setRaw(true);
260 
261       String[] split = labels.split(COMMA);
262       scan.setAuthorizations(new Authorizations(split[this.labelIndex * 2],
263           split[(this.labelIndex * 2) + 1]));
264       if (delete) {
265         LOG.info("Running deletes");
266       } else {
267         LOG.info("Running copiers");
268       }
269       if (delete) {
270         TableMapReduceUtil.initTableMapperJob(tableName.getNameAsString(), scan,
271             VisibilityDeleteImport.class, null, null, job);
272       } else {
273         TableMapReduceUtil.initTableMapperJob(tableName.getNameAsString(), scan,
274             VisibilityImport.class, null, null, job);
275       }
276       job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
277       job.getConfiguration().setBoolean("mapreduce.reduce.speculative", false);
278       TableMapReduceUtil.initTableReducerJob(COMMON_TABLE_NAME, null, job, null, null, null, null);
279       TableMapReduceUtil.addDependencyJars(job);
280       TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
281       TableMapReduceUtil.initCredentials(job);
282       job.setNumReduceTasks(0);
283       boolean success = job.waitForCompletion(true);
284       return success ? 0 : 1;
285     }
286 
287     @Override
288     public int run(String[] arg0) throws Exception {
289       // TODO Auto-generated method stub
290       return 0;
291     }
292   }
293 
294   static class VisibilityImport extends Import.Importer {
295     private int index;
296     private String labels;
297     private String[] split;
298 
299     @Override
300     public void setup(org.apache.hadoop.mapreduce.Mapper.Context context) {
301       index = context.getConfiguration().getInt(INDEX_KEY, -1);
302       labels = context.getConfiguration().get(LABELS_KEY);
303       split = labels.split(COMMA);
304       super.setup(context);
305     }
306 
307     @Override
308     protected void addPutToKv(Put put, Cell kv) throws IOException {
309       String visibilityExps = split[index * 2] + OR + split[(index * 2) + 1];
310       put.setCellVisibility(new CellVisibility(visibilityExps));
311       super.addPutToKv(put, kv);
312     }
313   }
314 
315   static class VisibilityDeleteImport extends Import.Importer {
316     private int index;
317     private String labels;
318     private String[] split;
319 
320     @Override
321     public void setup(org.apache.hadoop.mapreduce.Mapper.Context context) {
322       index = context.getConfiguration().getInt(INDEX_KEY, -1);
323       labels = context.getConfiguration().get(LABELS_KEY);
324       split = labels.split(COMMA);
325       super.setup(context);
326     }
327 
328     // Creating delete here
329     @Override
330     protected void processKV(ImmutableBytesWritable key, Result result,
331         org.apache.hadoop.mapreduce.Mapper.Context context, Put put,
332         org.apache.hadoop.hbase.client.Delete delete) throws 
333         IOException, InterruptedException {
334       String visibilityExps = split[index * 2] + OR + split[(index * 2) + 1];
335       for (Cell kv : result.rawCells()) {
336         // skip if we filter it out
337         if (kv == null)
338           continue;
339         // Create deletes here
340         if (delete == null) {
341           delete = new Delete(key.get());
342         }
343         delete.setCellVisibility(new CellVisibility(visibilityExps));
344         delete.deleteFamily(kv.getFamily());
345       }
346       if (delete != null) {
347         context.write(key, delete);
348       }
349     }
350   }
351 
352   @Override
353   protected void addOptions() {
354     super.addOptions();
355     addOptWithArg("u", USER_OPT, "User name");
356   }
357   
358   @Override
359   protected void processOptions(CommandLine cmd) {
360     super.processOptions(cmd);
361     if (cmd.hasOption(USER_OPT)) {
362       userName = cmd.getOptionValue(USER_OPT);
363     }
364     
365   }
  @Override
  public void setUpCluster() throws Exception {
    util = getTestingUtil(null);
    Configuration conf = util.getConfiguration();
    // Visibility labels require HFile v3 cells (tags).
    conf.setInt(HFile.FORMAT_VERSION_KEY, 3);
    // Install the VisibilityController on master and regionservers before the cluster starts.
    conf.set("hbase.coprocessor.master.classes", VisibilityController.class.getName());
    conf.set("hbase.coprocessor.region.classes", VisibilityController.class.getName());
    conf.set("hbase.superuser", User.getCurrent().getName());
    conf.setBoolean("dfs.permissions", false);
    // Restricted user the verification scans run as; has no groups.
    USER = User.createUserForTesting(conf, userName, new String[] {});
    super.setUpCluster();
    // Labels can only be registered once the cluster (and the labels table) is up.
    addLabels();
  }
379 
380   static TableName getTableName(int i) {
381     return TableName.valueOf(tableName + UNDER_SCORE + i);
382   }
383 
384   private void addLabels() throws Exception {
385     try {
386       VisibilityClient.addLabels(util.getConfiguration(), labels.split(COMMA));
387       VisibilityClient.setAuths(util.getConfiguration(), labels.split(COMMA), USER.getName());
388     } catch (Throwable t) {
389       throw new IOException(t);
390     }
391   }
392 
393   static class VisibilityVerify extends Verify {
394     private static final Log LOG = LogFactory.getLog(VisibilityVerify.class);
395     private TableName tableName;
396     private int labelIndex;
397 
398     public VisibilityVerify(String tableName, int index) {
399       this.tableName = TableName.valueOf(tableName);
400       this.labelIndex = index;
401     }
402 
403     @Override
404     public int run(final Path outputDir, final int numReducers) throws Exception {
405       LOG.info("Running Verify with outputDir=" + outputDir + ", numReducers=" + numReducers);
406       PrivilegedExceptionAction<Integer> scanAction = new PrivilegedExceptionAction<Integer>() {
407         @Override
408         public Integer run() throws Exception {
409           return doVerify(outputDir, numReducers);
410         }
411       };
412       return USER.runAs(scanAction);
413     }
414 
415     private int doVerify(Path outputDir, int numReducers) throws IOException, InterruptedException,
416         ClassNotFoundException {
417       job = new Job(getConf());
418 
419       job.setJobName("Link Verifier");
420       job.setNumReduceTasks(numReducers);
421       job.setJarByClass(getClass());
422 
423       setJobScannerConf(job);
424 
425       Scan scan = new Scan();
426       scan.addColumn(FAMILY_NAME, COLUMN_PREV);
427       scan.setCaching(10000);
428       scan.setCacheBlocks(false);
429       String[] split = labels.split(COMMA);
430 
431       scan.setAuthorizations(new Authorizations(split[this.labelIndex * 2],
432           split[(this.labelIndex * 2) + 1]));
433 
434       TableMapReduceUtil.initTableMapperJob(tableName.getName(), scan, VerifyMapper.class,
435           BytesWritable.class, BytesWritable.class, job);
436       TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
437 
438       job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
439 
440       job.setReducerClass(VerifyReducer.class);
441       job.setOutputFormatClass(TextOutputFormat.class);
442       TextOutputFormat.setOutputPath(job, outputDir);
443       boolean success = job.waitForCompletion(true);
444 
445       return success ? 0 : 1;
446     }
447 
448     @Override
449     protected void handleFailure(Counters counters) throws IOException {
450       Configuration conf = job.getConfiguration();
451       HConnection conn = HConnectionManager.getConnection(conf);
452       TableName tableName = TableName.valueOf(COMMON_TABLE_NAME);
453       CounterGroup g = counters.getGroup("undef");
454       Iterator<Counter> it = g.iterator();
455       while (it.hasNext()) {
456         String keyString = it.next().getName();
457         byte[] key = Bytes.toBytes(keyString);
458         HRegionLocation loc = conn.relocateRegion(tableName, key);
459         LOG.error("undefined row " + keyString + ", " + loc);
460       }
461       g = counters.getGroup("unref");
462       it = g.iterator();
463       while (it.hasNext()) {
464         String keyString = it.next().getName();
465         byte[] key = Bytes.toBytes(keyString);
466         HRegionLocation loc = conn.relocateRegion(tableName, key);
467         LOG.error("unreferred row " + keyString + ", " + loc);
468       }
469     }
470   }
471 
472   static class VisibilityLoop extends Loop {
473     private static final int SLEEP_IN_MS = 5000;
474     private static final Log LOG = LogFactory.getLog(VisibilityLoop.class);
475     IntegrationTestBigLinkedListWithVisibility it;
476 
477     @Override
478     protected void runGenerator(int numMappers, long numNodes, String outputDir, Integer width,
479         Integer wrapMuplitplier) throws Exception {
480       Path outputPath = new Path(outputDir);
481       UUID uuid = UUID.randomUUID(); // create a random UUID.
482       Path generatorOutput = new Path(outputPath, uuid.toString());
483 
484       Generator generator = new VisibilityGenerator();
485       generator.setConf(getConf());
486       int retCode = generator.run(numMappers, numNodes, generatorOutput, width, wrapMuplitplier);
487       if (retCode > 0) {
488         throw new RuntimeException("Generator failed with return code: " + retCode);
489       }
490     }
491 
492     protected void runDelete(int numMappers, long numNodes, String outputDir, Integer width,
493         Integer wrapMuplitplier, int tableIndex) throws Exception {
494       LOG.info("Running copier on table "+IntegrationTestBigLinkedListWithVisibility.getTableName(tableIndex));
495       Copier copier = new Copier(
496           IntegrationTestBigLinkedListWithVisibility.getTableName(tableIndex), tableIndex, true);
497       copier.setConf(getConf());
498       copier.runCopier(outputDir);
499       Thread.sleep(SLEEP_IN_MS);
500     }
501 
502     protected void runVerify(String outputDir, int numReducers, long expectedNumNodes,
503         boolean allTables) throws Exception {
504       Path outputPath = new Path(outputDir);
505 
506       if (allTables) {
507         for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
508           LOG.info("Verifying table " + i);
509           sleep(SLEEP_IN_MS);
510           UUID uuid = UUID.randomUUID(); // create a random UUID.
511           Path iterationOutput = new Path(outputPath, uuid.toString());
512           Verify verify = new VisibilityVerify(getTableName(i).getNameAsString(), i);
513           verify(numReducers, expectedNumNodes, iterationOutput, verify);
514         }
515       }
516       for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
517         runVerifyCommonTable(outputDir, numReducers, expectedNumNodes, i);
518       }
519     }
520 
521     private void runVerify(String outputDir, int numReducers, long expectedNodes, int tableIndex)
522         throws Exception {
523       long temp = expectedNodes;
524       for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
525         if (i <= tableIndex) {
526           expectedNodes = 0;
527         } else {
528           expectedNodes = temp;
529         }
530         LOG.info("Verifying data in the table with index "+i+ " and expected nodes is "+expectedNodes);
531         runVerifyCommonTable(outputDir, numReducers, expectedNodes, i);
532       }
533     }
534 
535     private void sleep(long ms) throws InterruptedException {
536       Thread.sleep(ms);
537     }
538 
539     protected void runVerifyCommonTable(String outputDir, int numReducers, long expectedNumNodes,
540         int index) throws Exception {
541       LOG.info("Verifying common table with index " + index);
542       sleep(SLEEP_IN_MS);
543       Path outputPath = new Path(outputDir);
544       UUID uuid = UUID.randomUUID(); // create a random UUID.
545       Path iterationOutput = new Path(outputPath, uuid.toString());
546       Verify verify = new VisibilityVerify(TableName.valueOf(COMMON_TABLE_NAME).getNameAsString(),
547           index);
548       verify(numReducers, expectedNumNodes, iterationOutput, verify);
549     }
550 
551     protected void runCopier(String outputDir) throws Exception {
552       for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
553         LOG.info("Running copier " + IntegrationTestBigLinkedListWithVisibility.getTableName(i));
554         sleep(SLEEP_IN_MS);
555         Copier copier = new Copier(IntegrationTestBigLinkedListWithVisibility.getTableName(i), i,
556             false);
557         copier.setConf(getConf());
558         copier.runCopier(outputDir);
559       }
560     }
561 
562     private void verify(int numReducers, long expectedNumNodes, 
563         Path iterationOutput, Verify verify) throws Exception {
564       verify.setConf(getConf());
565       int retCode = verify.run(iterationOutput, numReducers);
566       if (retCode > 0) {
567         throw new RuntimeException("Verify.run failed with return code: " + retCode);
568       }
569 
570       if (!verify.verify(expectedNumNodes)) {
571         throw new RuntimeException("Verify.verify failed");
572       }
573 
574       LOG.info("Verify finished with succees. Total nodes=" + expectedNumNodes);
575     }
576 
577     @Override
578     public int run(String[] args) throws Exception {
579       if (args.length < 5) {
580         System.err
581             .println("Usage: Loop <num iterations> " +
582                 "<num mappers> <num nodes per mapper> <output dir> " +
583                 "<num reducers> [<width> <wrap multiplier>]");
584         return 1;
585       }
586       LOG.info("Running Loop with args:" + Arrays.deepToString(args));
587 
588       int numIterations = Integer.parseInt(args[0]);
589       int numMappers = Integer.parseInt(args[1]);
590       long numNodes = Long.parseLong(args[2]);
591       String outputDir = args[3];
592       int numReducers = Integer.parseInt(args[4]);
593       Integer width = (args.length < 6) ? null : Integer.parseInt(args[5]);
594       Integer wrapMuplitplier = (args.length < 7) ? null : Integer.parseInt(args[6]);
595 
596       long expectedNumNodes = 0;
597 
598       if (numIterations < 0) {
599         numIterations = Integer.MAX_VALUE; // run indefinitely (kind of)
600       }
601 
602       for (int i = 0; i < numIterations; i++) {
603         LOG.info("Starting iteration = " + i);
604         LOG.info("Generating data");
605         runGenerator(numMappers, numNodes, outputDir, width, wrapMuplitplier);
606         expectedNumNodes += numMappers * numNodes;
607         // Copying wont work because expressions are not returned back to the
608         // client
609         LOG.info("Running copier");
610         sleep(SLEEP_IN_MS);
611         runCopier(outputDir);
612         LOG.info("Verifying copied data");
613         sleep(SLEEP_IN_MS);
614         runVerify(outputDir, numReducers, expectedNumNodes, true);
615         sleep(SLEEP_IN_MS);
616         for (int j = 0; j < DEFAULT_TABLES_COUNT; j++) {
617           LOG.info("Deleting data on table with index: "+j);
618           runDelete(numMappers, numNodes, outputDir, width, wrapMuplitplier, j);
619           sleep(SLEEP_IN_MS);
620           LOG.info("Verifying common table after deleting");
621           runVerify(outputDir, numReducers, expectedNumNodes, j);
622           sleep(SLEEP_IN_MS);
623         }
624       }
625       return 0;
626     }
627   }
628 
629   @Override
630   @Test
631   public void testContinuousIngest() throws IOException, Exception {
632     // Loop <num iterations> <num mappers> <num nodes per mapper> <output dir>
633     // <num reducers>
634     int ret = ToolRunner.run(
635         getTestingUtil(getConf()).getConfiguration(),
636         new VisibilityLoop(),
637         new String[] { "1", "1", "20000",
638             util.getDataTestDirOnTestFS("IntegrationTestBigLinkedListWithVisibility").toString(),
639             "1", "10000" });
640     org.junit.Assert.assertEquals(0, ret);
641   }
642 
643   public static void main(String[] args) throws Exception {
644     Configuration conf = HBaseConfiguration.create();
645     IntegrationTestingUtility.setUseDistributedCluster(conf);
646     int ret = ToolRunner.run(conf, new IntegrationTestBigLinkedListWithVisibility(), args);
647     System.exit(ret);
648   }
649 
650   @Override
651   protected MonkeyFactory getDefaultMonkeyFactory() {
652     return MonkeyFactory.getFactory(MonkeyFactory.CALM);
653   }
654 
655   @Override
656   public int runTestFromCommandLine() throws Exception {
657     Tool tool = null;
658     Loop loop = new VisibilityLoop();
659     loop.it = this;
660     tool = loop;
661     return ToolRunner.run(getConf(), tool, otherArgs);
662   }
663 }