/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * <p>
 * Tests scanning a table snapshot with various scan start and stop row
 * combinations. Each combination is set on a Scan and run through a MapReduce
 * job to verify that the row boundaries are handed over and honored correctly.
 * </p>
 */
@Category(LargeTests.class)
public class TestTableSnapshotInputFormatScan {

  static final Log LOG = LogFactory.getLog(TestTableSnapshotInputFormatScan.class);
  static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  static final byte[] TABLE_NAME = Bytes.toBytes("scantest");
  static final byte[] SNAPSHOT_NAME = Bytes.toBytes("scantest_snapshot");
  static final byte[] INPUT_FAMILY = Bytes.toBytes("contents");
  static final String KEY_STARTROW = "startRow";
  static final String KEY_LASTROW = "stpRow";

  private static HTable table = null;

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // config snapshot support
    TEST_UTIL.getConfiguration().setBoolean(
        SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
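    // assign regions round-robin across the region servers when a table is enabled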
    TEST_UTIL.getConfiguration().setBoolean(
        "hbase.master.enabletable.roundrobin", true);

    // switch TIF to log at DEBUG level
    TEST_UTIL.enableDebug(TableSnapshotInputFormat.class);

    // start mini hbase cluster
    TEST_UTIL.startMiniCluster(3);

    // create and fill table
    table = TEST_UTIL.createTable(TABLE_NAME, INPUT_FAMILY);
    TEST_UTIL.createMultiRegions(table, INPUT_FAMILY);
    TEST_UTIL.loadTable(table, INPUT_FAMILY);
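    // take the snapshot while the table is disabled (an offline snapshot), then
    // re-enable it; the MR jobs below read the snapshot, not the live table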
    TEST_UTIL.getHBaseAdmin().disableTable(TABLE_NAME);
    TEST_UTIL.getHBaseAdmin().snapshot(SNAPSHOT_NAME, TABLE_NAME);
    TEST_UTIL.getHBaseAdmin().enableTable(TABLE_NAME);

    // start MR cluster
    TEST_UTIL.startMiniMapReduceCluster();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniMapReduceCluster();
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Tests a MR scan over the snapshot with neither a start nor a stop row.
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
  @Test
  public void testScanEmptyToEmpty() throws IOException, InterruptedException,
      ClassNotFoundException {
    testScan(null, null, null);
  }

  /**
   * Tests a MR scan over the snapshot with no start row and stop row "app".
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
  @Test
  public void testScanEmptyToAPP() throws IOException, InterruptedException,
      ClassNotFoundException {
    testScan(null, "app", "apo");
  }

  /**
   * Tests a MR scan over the snapshot with no start row and stop row "bba".
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
  @Test
  public void testScanEmptyToBBA() throws IOException, InterruptedException,
      ClassNotFoundException {
    testScan(null, "bba", "baz");
  }

  /**
   * Tests a MR scan over the snapshot with no start row and stop row "bbb".
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
  @Test
  public void testScanEmptyToBBB() throws IOException, InterruptedException,
      ClassNotFoundException {
    testScan(null, "bbb", "bba");
  }

  /**
   * Tests a MR scan over the snapshot with no start row and stop row "opp".
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
  @Test
  public void testScanEmptyToOPP() throws IOException, InterruptedException,
      ClassNotFoundException {
    testScan(null, "opp", "opo");
  }

  /**
   * Runs a MR scan over the snapshot using the given start and stop rows; the
   * expected last row is passed to the job configuration for verification.
   *
   * @throws IOException
   * @throws ClassNotFoundException
   * @throws InterruptedException
   */
  protected void testScan(String start, String stop, String last)
      throws IOException, InterruptedException, ClassNotFoundException {
    String jobName = "Scan" + (start != null ? start.toUpperCase() : "Empty")
        + "To" + (stop != null ? stop.toUpperCase() : "Empty");
    LOG.info("Before map/reduce startup - job " + jobName);
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    Scan scan = new Scan();
    scan.addFamily(INPUT_FAMILY);
    if (start != null) {
      scan.setStartRow(Bytes.toBytes(start));
    }
    c.set(KEY_STARTROW, start != null ? start : "");
    if (stop != null) {
      scan.setStopRow(Bytes.toBytes(stop));
    }
    c.set(KEY_LASTROW, last != null ? last : "");
    LOG.info("scan before: " + scan);
    Job job = new Job(c, jobName);

    FileSystem fs = FileSystem.get(c);
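    // scratch directory on the cluster filesystem where the snapshot is restored
    // for the duration of the job; removed again in the finally block below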
    Path tmpDir = new Path("/" + UUID.randomUUID());
    fs.mkdirs(tmpDir);
    try {
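      // set up the job to read from the snapshot with TableSnapshotInputFormat:
      // mappers scan the snapshot files restored under tmpDir rather than going
      // through the region servers (the boolean flag controls whether dependency
      // jars are shipped with the job)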
      TableMapReduceUtil.initTableSnapshotMapperJob(Bytes.toString(SNAPSHOT_NAME),
          scan, TestTableInputFormatScanBase.ScanMapper.class,
          ImmutableBytesWritable.class, ImmutableBytesWritable.class, job,
          false, tmpDir);
      job.setReducerClass(TestTableInputFormatScanBase.ScanReducer.class);
      job.setNumReduceTasks(1); // one to get final "first" and "last" key
      FileOutputFormat.setOutputPath(job, new Path(job.getJobName()));
      LOG.info("Started " + job.getJobName());
      assertTrue(job.waitForCompletion(true));
      LOG.info("After map/reduce completion - job " + jobName);
    } finally {
      fs.delete(tmpDir, true);
    }
  }

}