/*
 *   Licensed to the Apache Software Foundation (ASF) under one
 *   or more contributor license agreements.  See the NOTICE file
 *   distributed with this work for additional information
 *   regarding copyright ownership.  The ASF licenses this file
 *   to you under the Apache License, Version 2.0 (the
 *   "License"); you may not use this file except in compliance
 *   with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing,
 *   software distributed under the License is distributed on an
 *   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *   KIND, either express or implied.  See the License for the
 *   specific language governing permissions and limitations
 *   under the License.
 *
 */
package org.apache.directory.mavibot.btree;


import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * A class used for reclaiming the copied pages, i.e. the pages of older revisions that
 * are no longer used by any read transaction.
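 * <p>
 * The reclamation methods are package private: they are meant to be invoked from within
 * the btree package (typically by the record manager). A minimal usage sketch, assuming
 * a RecordManager instance named <code>recordManager</code>:
 * <pre>
 * SpaceReclaimer reclaimer = new SpaceReclaimer( recordManager );
 * reclaimer.reclaim(); // frees the copied pages of revisions no longer in use
 * </pre>
 *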
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class SpaceReclaimer
{
    /** the record manager */
    private RecordManager rm;

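    /** the name of the file used to persist the copied page map between restarts */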
    private static final String COPIED_PAGE_MAP_DATA_FILE = "cpm.db";

    /** The Logger used by this class */
    protected static final Logger LOG = LoggerFactory.getLogger( SpaceReclaimer.class );

    /**
     * Creates a new instance of SpaceReclaimer.
     *
     * @param rm the record manager
     */
    public SpaceReclaimer( RecordManager rm )
    {
        this.rm = rm;
    }


    /**
     * Stores the copied page map, if it is not empty, in a file under the given directory.
     *
     * @param dir the directory where the Mavibot database file is present
     */
    /* no qualifier */ void storeCopiedPageMap( File dir )
    {
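        // Persist the in-memory copied page map so that the pages it references can
        // still be reclaimed after a restart (the map is read back by readCopiedPageMap()).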
        if ( rm.copiedPageMap.isEmpty() )
        {
            LOG.debug( "Copied page map is empty, nothing to store on disk." );
            return;
        }

        File file = new File( dir, COPIED_PAGE_MAP_DATA_FILE );

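        // The map is written with plain Java serialization, so the map and its
        // RevisionName keys are expected to be Serializable.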
        try ( OutputStream fileOut = new FileOutputStream( file );
            ObjectOutputStream objOut = new ObjectOutputStream( fileOut ) )
        {
            LOG.debug( "Storing {} RevisionNames of Copied page map", rm.copiedPageMap.size() );

            objOut.writeObject( rm.copiedPageMap );

            LOG.debug( "Successfully stored copied page map in {}", file.getAbsolutePath() );
        }
        catch ( Exception e )
        {
            LOG.warn( "Failed to store the copied page map in {}", file.getAbsolutePath() );
            LOG.warn( "", e );
        }
    }


    /**
     * Reads the copied page map from the file named {@link #COPIED_PAGE_MAP_DATA_FILE}, if it
     * is present under the given directory. The file is deleted once it has been read.
     *
     * @param dir the directory where the Mavibot database file is present
     *
     * @return the stored copied page map, or an empty map if no such file exists or it cannot be read
     */
    /* no qualifier */ ConcurrentHashMap<RevisionName, long[]> readCopiedPageMap( File dir )
    {
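        // Reload the map persisted by storeCopiedPageMap(). The file is deleted right after
        // being read so that the same offsets are not freed a second time on a later restart.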

        ConcurrentHashMap<RevisionName, long[]> map = new ConcurrentHashMap<RevisionName, long[]>();

        File file = new File( dir, COPIED_PAGE_MAP_DATA_FILE );

        if ( !file.exists() )
        {
            LOG.debug( "Copied page map store {} doesn't exist, returning empty map", file.getAbsolutePath() );
            return map;
        }

        try ( InputStream fileIn = new FileInputStream( file );
            ObjectInputStream objIn = new ObjectInputStream( fileIn ) )
        {
            LOG.debug( "Reading Copied page map data stored in {}", file.getAbsolutePath() );

            map = ( ConcurrentHashMap<RevisionName, long[]> ) objIn.readObject();

            LOG.debug( "Successfully read copied page map containing {} RevisionNames", map.size() );
        }
        catch ( Exception e )
        {
            LOG.warn( "Failed to read the copied page map from {}", file.getAbsolutePath() );
            LOG.warn( "", e );
        }
        finally
        {
            boolean deleted = file.delete();

            // Leaving the file behind would be dangerous: during a subsequent restart the
            // pages would be freed again, even though they might be in use by then
            if ( !deleted )
            {
                String warn = "Failed to delete the copied page map store " + file.getAbsolutePath()
                    + ". Make sure the Mavibot process has the appropriate permissions to delete this file.";
                LOG.warn( warn );

                throw new RuntimeException( warn );
            }
        }

        return map;
    }


    /**
     * Reclaims the copied pages: for each managed B-Tree, frees the pages of the copied
     * revisions that are no longer used by any active read transaction.
     */
    /* no qualifier */ void reclaim()
    {
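        // For every managed B-Tree: collect the revisions still pinned by open read
        // transactions, then free the offsets of all other copied revisions and remove
        // the corresponding entries from the copied page map.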
        //System.out.println( "reclaiming pages" );
        try
        {
            Set<String> managed = rm.getManagedTrees();

            for ( String name : managed )
            {
                PersistedBTree tree = ( PersistedBTree ) rm.getManagedTree( name );

                Set<Long> inUseRevisions = new TreeSet<Long>();

                // the tree might have been removed
                if ( tree != null )
                {
                    Iterator<ReadTransaction> txnItr = tree.getReadTransactions().iterator();
                    while ( txnItr.hasNext() )
                    {
                        inUseRevisions.add( txnItr.next().getRevision() );
                    }
                }

                List<RevisionOffset> copiedRevisions = getRevisions( name );

                for ( RevisionOffset ro : copiedRevisions )
                {
                    long rv = ro.getRevision();
                    if ( inUseRevisions.contains( rv ) )
                    {
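                        // an open read transaction still uses this revision, so stop
                        // reclaiming this B-Tree's copied revisions for now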
                        //System.out.println( "Revision " + rv + " of BTree " + name + " is in use, not reclaiming pages" );
                        break;
                    }

                    long[] offsets = ro.getOffsets();

                    //System.out.println( "Reclaiming " + Arrays.toString( offsets ) + "( " + offsets.length + " ) pages of the revision " + rv + " of BTree " + name );

                    rm.free( offsets );

                    RevisionName key = new RevisionName( rv, name );
                    rm.copiedPageMap.remove( key );
                }
            }
        }
        catch ( Exception e )
        {
            LOG.warn( "Failed to reclaim the copied pages", e );
        }
    }


    /**
     * Gets a list of all the copied pages of a given B-Tree, one RevisionOffset per copied revision.
     *
     * @param name the name of the B-Tree
     * @return the list of RevisionOffset
     * @throws Exception if an error occurs while gathering the copied page offsets
     */
    private List<RevisionOffset> getRevisions( String name ) throws Exception
    {
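        // Scan the copied page map and collect, for each copied revision of the given
        // B-Tree, the offsets of the pages that were copied at that revision.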
        long nbElems = rm.copiedPageMap.size();
        //System.out.println( "Total number of entries in CPB " + nbElems );

        if ( nbElems == 0 )
        {
            return Collections.emptyList();
        }

        Iterator<Map.Entry<RevisionName, long[]>> cursor = rm.copiedPageMap.entrySet().iterator();

        List<RevisionOffset> lst = new ArrayList<RevisionOffset>();

        while ( cursor.hasNext() )
        {
            Map.Entry<RevisionName, long[]> t = cursor.next();
            RevisionName rn = t.getKey();
            if ( name.equals( rn.getName() ) )
            {
                //System.out.println( t.getValue() );
                lst.add( new RevisionOffset( rn.getRevision(), t.getValue() ) );
            }
        }

        return lst;
    }
}