/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.snapshot;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

/**
 * Test the restore/clone operation from a file-system point of view.
 */
@Category(SmallTests.class)
public class TestRestoreSnapshotHelper {
  final Log LOG = LogFactory.getLog(getClass());

  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private final static String TEST_FAMILY = "cf";
  private final static String TEST_HFILE = "abc";

  private Configuration conf;
  private Path archiveDir;
  private FileSystem fs;
  private Path rootDir;

  @Before
  public void setup() throws Exception {
    rootDir = TEST_UTIL.getDataTestDir("testRestore");
    archiveDir = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
    fs = TEST_UTIL.getTestFileSystem();
    conf = TEST_UTIL.getConfiguration();
    FSUtils.setRootDir(conf, rootDir);
  }

  @After
  public void tearDown() throws Exception {
    fs.delete(TEST_UTIL.getDataTestDir(), true);
  }

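  /**
   * Clone a snapshot onto a new table, then clone the resulting clone again
   * ("link to link"), verifying the on-disk layout after each restore.
   */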
  @Test
  public void testRestore() throws IOException {
    HTableDescriptor htd = createTableDescriptor("testtb");

    Path snapshotDir = new Path(rootDir, "snapshot");
    createSnapshot(rootDir, snapshotDir, htd);

    // Test cloning a snapshot
    HTableDescriptor htdClone = createTableDescriptor("testtb-clone");
    testRestore(snapshotDir, htd.getNameAsString(), htdClone);
    verifyRestore(rootDir, htd, htdClone);

    // Test cloning a clone ("link to link")
    Path cloneDir = HTableDescriptor.getTableDir(rootDir, htdClone.getName());
    HTableDescriptor htdClone2 = createTableDescriptor("testtb-clone2");
    testRestore(cloneDir, htdClone.getNameAsString(), htdClone2);
    verifyRestore(rootDir, htd, htdClone2);
  }

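  /**
   * Check the files of the restored table: one HFileLink back to the source table's
   * hfile, and one Reference that resolves to that link.
   */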
  private void verifyRestore(final Path rootDir, final HTableDescriptor sourceHtd,
      final HTableDescriptor htdClone) throws IOException {
    String[] files = getHFiles(HTableDescriptor.getTableDir(rootDir, htdClone.getName()));
    assertEquals(2, files.length);
    assertTrue(files[0] + " should be an HFileLink", HFileLink.isHFileLink(files[0]));
    assertTrue(files[1] + " should be a Reference", StoreFile.isReference(files[1]));
    assertEquals(sourceHtd.getNameAsString(), HFileLink.getReferencedTableName(files[0]));
    assertEquals(TEST_HFILE, HFileLink.getReferencedHFileName(files[0]));
    Path refPath = getReferredToFile(files[1]);
    assertTrue(refPath.getName() + " should be an HFileLink",
        HFileLink.isHFileLink(refPath.getName()));
    assertEquals(files[0], refPath.getName());
  }

  /**
   * Execute the restore operation.
   * @param snapshotDir The snapshot directory to use as "restore source"
   * @param sourceTableName The name of the snapshotted table
   * @param htdClone The HTableDescriptor of the table to restore/clone.
   */
  public void testRestore(final Path snapshotDir, final String sourceTableName,
      final HTableDescriptor htdClone) throws IOException {
    LOG.debug("pre-restore table=" + htdClone.getNameAsString() + " snapshot=" + snapshotDir);
    FSUtils.logFileSystemState(fs, rootDir, LOG);

    FSTableDescriptors.createTableDescriptor(htdClone, conf);
    RestoreSnapshotHelper helper =
        getRestoreHelper(rootDir, snapshotDir, sourceTableName, htdClone);
    helper.restoreHdfsRegions();

    LOG.debug("post-restore table=" + htdClone.getNameAsString() + " snapshot=" + snapshotDir);
    FSUtils.logFileSystemState(fs, rootDir, LOG);
  }

  /**
   * Initialize the restore helper, based on the snapshot and table information provided.
   */
  private RestoreSnapshotHelper getRestoreHelper(final Path rootDir, final Path snapshotDir,
      final String sourceTableName, final HTableDescriptor htdClone) throws IOException {
    CatalogTracker catalogTracker = Mockito.mock(CatalogTracker.class);
    HTableDescriptor tableDescriptor = Mockito.mock(HTableDescriptor.class);
    ForeignExceptionDispatcher monitor = Mockito.mock(ForeignExceptionDispatcher.class);
    MonitoredTask status = Mockito.mock(MonitoredTask.class);

    SnapshotDescription sd = SnapshotDescription.newBuilder()
      .setName("snapshot").setTable(sourceTableName).build();

    return new RestoreSnapshotHelper(conf, fs, sd, snapshotDir,
      htdClone, HTableDescriptor.getTableDir(rootDir, htdClone.getName()), monitor, status);
  }

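  /**
   * Lay out a fake snapshot on disk: two regions of the given table, one holding a plain
   * hfile and one holding a reference to it (the split case), copied under snapshotDir.
   */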
  private void createSnapshot(final Path rootDir, final Path snapshotDir,
      final HTableDescriptor htd) throws IOException {
    // First region, simple with one plain hfile.
    HRegion r0 = HRegion.createHRegion(new HRegionInfo(htd.getName()), archiveDir,
        conf, htd, null, true, true);
    Path storeFile = new Path(new Path(r0.getRegionDir(), TEST_FAMILY), TEST_HFILE);
    fs.createNewFile(storeFile);
    r0.close();

    // Second region, used to test the split case.
    // This region contains a reference to the hfile in the first region.
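    // The reference file name follows the <hfileName>.<parentRegionEncodedName> convention;
    // after the restore it is expected to resolve to the HFileLink created for that hfile
    // (see verifyRestore).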
    HRegion r1 = HRegion.createHRegion(new HRegionInfo(htd.getName()), archiveDir,
        conf, htd, null, true, true);
    fs.createNewFile(new Path(new Path(r1.getRegionDir(), TEST_FAMILY),
        storeFile.getName() + '.' + r0.getRegionInfo().getEncodedName()));
    r1.close();

    Path tableDir = HTableDescriptor.getTableDir(archiveDir, htd.getName());
    FileUtil.copy(fs, tableDir, fs, snapshotDir, false, conf);
  }

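  /**
   * Create a table descriptor with the single test column family.
   */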
  private HTableDescriptor createTableDescriptor(final String tableName) {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
    return htd;
  }

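  /**
   * Resolve the file a Reference points to, using a fake table/region/cf base path
   * since only the resulting file name is checked.
   */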
  private Path getReferredToFile(final String referenceName) {
    Path fakeBasePath = new Path(new Path("table", "region"), "cf");
    return StoreFile.getReferredToFile(new Path(fakeBasePath, referenceName));
  }

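  /**
   * Return the sorted names of all store files found under the given table directory.
   */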
  private String[] getHFiles(final Path tableDir) throws IOException {
    List<String> files = new ArrayList<String>();
    for (Path regionDir: FSUtils.getRegionDirs(fs, tableDir)) {
      for (Path familyDir: FSUtils.getFamilyDirs(fs, regionDir)) {
        for (FileStatus file: FSUtils.listStatus(fs, familyDir)) {
          files.add(file.getPath().getName());
        }
      }
    }
    Collections.sort(files);
    return files.toArray(new String[files.size()]);
  }
}