package org.apache.directory.mavibot.btree;


import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
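
/**
 * Reclaims the disk space held by copied pages that are no longer referenced
 * by any ongoing read transaction. A sketch of typical usage, assuming a
 * RecordManager instance is already available:
 *
 * <pre>
 *     SpaceReclaimer reclaimer = new SpaceReclaimer( recordManager );
 *     reclaimer.reclaim();
 * </pre>
 */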
public class SpaceReclaimer
{
    /** The record manager whose copied pages are reclaimed */
    private RecordManager rm;

    /** The name of the file used to persist the copied page map between restarts */
    private static final String COPIED_PAGE_MAP_DATA_FILE = "cpm.db";

    /** The logger used by this class */
    protected static final Logger LOG = LoggerFactory.getLogger( SpaceReclaimer.class );
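
    /**
     * Creates a new SpaceReclaimer instance.
     *
     * @param rm the record manager
     */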
    public SpaceReclaimer( RecordManager rm )
    {
        this.rm = rm;
    }
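
    /**
     * Stores the copied page map, if it is not empty, in a file under the given
     * directory so that it can be restored when the record manager is reopened.
     *
     * @param dir the directory under which the copied page map file is written
     */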
    void storeCopiedPageMap( File dir )
    {
        if ( rm.copiedPageMap.isEmpty() )
        {
            LOG.debug( "Copied page map is empty, nothing to store on disk." );
            return;
        }

        File file = new File( dir, COPIED_PAGE_MAP_DATA_FILE );

        try
        {
            LOG.debug( "Storing {} RevisionNames of copied page map", rm.copiedPageMap.size() );

            OutputStream fileOut = new FileOutputStream( file );

            // Serialize the whole map; it is read back by readCopiedPageMap() on restart
            try ( ObjectOutputStream objOut = new ObjectOutputStream( fileOut ) )
            {
                objOut.writeObject( rm.copiedPageMap );
            }

            LOG.debug( "Successfully stored copied page map in {}", file.getAbsolutePath() );
        }
        catch ( Exception e )
        {
            LOG.warn( "Failed to store the copied page map in {}", file.getAbsolutePath() );
            LOG.warn( "", e );
        }
    }
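
    /**
     * Reads the copied page map from the file stored under the given directory, if present,
     * and deletes that file once it has been read.
     *
     * @param dir the directory holding the copied page map file
     * @return the copied page map, or an empty map if no file was found
     */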
    @SuppressWarnings("unchecked")
    ConcurrentHashMap<RevisionName, long[]> readCopiedPageMap( File dir )
    {
        ConcurrentHashMap<RevisionName, long[]> map = new ConcurrentHashMap<RevisionName, long[]>();

        File file = new File( dir, COPIED_PAGE_MAP_DATA_FILE );

        if ( !file.exists() )
        {
            LOG.debug( "Copied page map store {} doesn't exist, returning empty map", file.getAbsolutePath() );
            return map;
        }

        try
        {
            LOG.debug( "Reading copied page map data stored in {}", file.getAbsolutePath() );

            InputStream fileIn = new FileInputStream( file );

            try ( ObjectInputStream objIn = new ObjectInputStream( fileIn ) )
            {
                map = ( ConcurrentHashMap<RevisionName, long[]> ) objIn.readObject();
            }

            LOG.debug( "Successfully read copied page map containing {} RevisionNames", map.size() );
        }
        catch ( Exception e )
        {
            LOG.warn( "Failed to read the copied page map from {}", file.getAbsolutePath() );
            LOG.warn( "", e );
        }
        finally
        {
            // The store file is deleted after reading so that a stale copy is never reloaded
            boolean deleted = file.delete();

            if ( !deleted )
            {
                String warn = "Failed to delete the copied page map store " + file.getAbsolutePath() +
                    ". Make sure the appropriate permissions are given to the mavibot process to delete this file.";
                LOG.warn( warn );

                throw new RuntimeException( warn );
            }
        }

        return map;
    }
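
    /**
     * Frees the pages of copied revisions that are no longer referenced by any
     * ongoing read transaction, for every managed B-tree.
     */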
    void reclaim()
    {
        try
        {
            Set<String> managed = rm.getManagedTrees();

            for ( String name : managed )
            {
                PersistedBTree tree = ( PersistedBTree ) rm.getManagedTree( name );

                Set<Long> inUseRevisions = new TreeSet<Long>();

                // Gather the revisions that are still held by open read transactions
                if ( tree != null )
                {
                    Iterator<ReadTransaction> txnItr = tree.getReadTransactions().iterator();

                    while ( txnItr.hasNext() )
                    {
                        inUseRevisions.add( txnItr.next().getRevision() );
                    }
                }

                List<RevisionOffset> copiedRevisions = getRevisions( name );

                for ( RevisionOffset ro : copiedRevisions )
                {
                    long rv = ro.getRevision();

                    if ( inUseRevisions.contains( rv ) )
                    {
                        // This revision is still in use: stop reclaiming pages for this tree
                        break;
                    }

                    long[] offsets = ro.getOffsets();

                    // Free the copied pages held at these offsets
                    rm.free( offsets );

                    RevisionName key = new RevisionName( rv, name );
                    rm.copiedPageMap.remove( key );
                }
            }
        }
        catch ( Exception e )
        {
            LOG.warn( "Errors occurred while reclaiming the copied pages", e );
        }
    }
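
    /**
     * Gathers the copied page offsets stored for the B-tree with the given name.
     *
     * @param name the name of the B-tree
     * @return a list of (revision, offsets) pairs, one per copied revision of the given B-tree
     * @throws Exception if the copied page map cannot be read
     */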
    private List<RevisionOffset> getRevisions( String name ) throws Exception
    {
        long nbElems = rm.copiedPageMap.size();

        if ( nbElems == 0 )
        {
            return Collections.emptyList();
        }

        Iterator<Map.Entry<RevisionName, long[]>> cursor = rm.copiedPageMap.entrySet().iterator();

        List<RevisionOffset> lst = new ArrayList<RevisionOffset>();

        while ( cursor.hasNext() )
        {
            Map.Entry<RevisionName, long[]> t = cursor.next();
            RevisionName rn = t.getKey();

            if ( name.equals( rn.getName() ) )
            {
                // Only keep the entries belonging to the given B-tree
                lst.add( new RevisionOffset( rn.getRevision(), t.getValue() ) );
            }
        }

        return lst;
    }
}