/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase.regionserver.wal;

import java.util.HashMap;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;

import com.google.common.base.Preconditions;

/**
 * WAL dictionary using an LRU eviction algorithm. Uses a linked list running
 * through a hashtable. Currently has a maximum of 2^15 entries and will start
 * evicting once that number is exceeded. The maximum memory we expect this
 * dictionary to take in the worst case is about:
 * <code>(2 ^ 15) * 5 (region name, row key, CF, column qualifier, table) * 100 bytes (these are some big names) = ~16MB</code>.
 * If you want to get silly, even at 1 KB entries, it maxes out at 160 megabytes.
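 * <p>
 * A minimal usage sketch (illustrative only; the initial size and the bytes
 * interned below are made-up example values):
 * <pre>
 * Dictionary dict = new LRUDictionary();
 * dict.init(Short.MAX_VALUE);                   // capacity of the backing map
 * byte[] family = Bytes.toBytes("cf");
 * // The first lookup misses but installs the entry, so it returns
 * // NOT_IN_DICTIONARY.
 * short idx = dict.findEntry(family, 0, family.length);
 * // The second lookup hits and returns the assigned index.
 * idx = dict.findEntry(family, 0, family.length);
 * byte[] stored = dict.getEntry(idx);           // the stored copy of "cf"
 * </pre>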
 */
@InterfaceAudience.Private
public class LRUDictionary implements Dictionary {

  BidirectionalLRUMap backingStore;

  @Override
  public byte[] getEntry(short idx) {
    return backingStore.get(idx);
  }

  @Override
  public void init(int initialSize) {
    backingStore = new BidirectionalLRUMap(initialSize);
  }

  @Override
  public short findEntry(byte[] data, int offset, int length) {
    short ret = backingStore.findIdx(data, offset, length);
    if (ret == NOT_IN_DICTIONARY) {
      // Miss: install the entry so subsequent lookups hit, but still
      // report NOT_IN_DICTIONARY for this call.
      addEntry(data, offset, length);
    }
    return ret;
  }

  @Override
  public short addEntry(byte[] data, int offset, int length) {
    if (length <= 0) return NOT_IN_DICTIONARY;
    return backingStore.put(data, offset, length);
  }

  @Override
  public void clear() {
    backingStore.clear();
  }

  /*
   * Internal class used to implement LRU eviction and dual lookup (by key and
   * value).
   *
   * This is not thread-safe. Don't use in multi-threaded applications.
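   *
   * For illustration, a hypothetical state with initialSize = 3, after adding
   * "a", "b", "c" in that order and then looking up "a" again:
   *
   *   LRU list:    head -> a -> c -> b <- tail   (the next put() reuses b's slot)
   *   nodeToIndex: {a=0, b=1, c=2}               (content -> short index)
   *   indexToNode: [a, b, c]                     (short index -> content)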
   */
  static class BidirectionalLRUMap {
    private int currSize = 0;

    // Head and tail of the LRU list.
    private Node head;
    private Node tail;

    // Dual lookup: by content (node -> index) and by index (index -> node).
    private HashMap<Node, Short> nodeToIndex = new HashMap<Node, Short>();
    private Node[] indexToNode;
    private int initSize = 0;

    public BidirectionalLRUMap(int initialSize) {
      initSize = initialSize;
      indexToNode = new Node[initialSize];
      for (int i = 0; i < initialSize; i++) {
        indexToNode[i] = new Node();
      }
    }

    private short put(byte[] array, int offset, int length) {
      // We copy the bytes we want, otherwise we might be holding references to
      // massive arrays in our dictionary (or those arrays might change).
      byte[] stored = new byte[length];
      Bytes.putBytes(stored, 0, array, offset, length);

      if (currSize < initSize) {
        // There is space to add without evicting.
        indexToNode[currSize].setContents(stored, 0, stored.length);
        setHead(indexToNode[currSize]);
        short ret = (short) currSize++;
        nodeToIndex.put(indexToNode[ret], ret);
        return ret;
      } else {
        // Full: evict the least recently used entry (the tail) and reuse its slot.
        short s = nodeToIndex.remove(tail);
        tail.setContents(stored, 0, stored.length);
        // The node's contents changed, so we need to rehash it.
        nodeToIndex.put(tail, s);
        moveToHead(tail);
        return s;
      }
    }

    private short findIdx(byte[] array, int offset, int length) {
      Short s;
      final Node comparisonNode = new Node();
      comparisonNode.setContents(array, offset, length);
      if ((s = nodeToIndex.get(comparisonNode)) != null) {
        moveToHead(indexToNode[s]);
        return s;
      } else {
        // Not found; -1 matches Dictionary.NOT_IN_DICTIONARY.
        return -1;
      }
    }

    private byte[] get(short idx) {
      Preconditions.checkElementIndex(idx, currSize);
      moveToHead(indexToNode[idx]);
      return indexToNode[idx].container;
    }

    private void moveToHead(Node n) {
      if (head == n) {
        // no-op -- it's already the head.
        return;
      }
      // At this point we definitely have a prev, since it's not the head.
      assert n.prev != null;
      // Unlink prev.
      n.prev.next = n.next;

      // Unlink next.
      if (n.next != null) {
        n.next.prev = n.prev;
      } else {
        assert n == tail;
        tail = n.prev;
      }
      // Node is now removed from the list. Re-add it at the head.
      setHead(n);
    }

    private void setHead(Node n) {
      // Assumes n is already unlinked from the list at this point.
      n.prev = null;
      n.next = head;
      if (head != null) {
        assert head.prev == null;
        head.prev = n;
      }

      head = n;

      // First entry.
      if (tail == null) {
        tail = n;
      }
    }

    private void clear() {
      currSize = 0;
      nodeToIndex.clear();
      tail = null;
      head = null;

      // Drop references to the stored byte arrays and unlink every node.
      for (Node n : indexToNode) {
        n.container = null;
        n.next = null;
        n.prev = null;
      }
    }

    private static class Node {
      byte[] container;
      int offset;
      int length;
      Node next; // link towards the tail
      Node prev; // link towards the head

      public Node() {
      }

      private void setContents(byte[] container, int offset, int length) {
        this.container = container;
        this.offset = offset;
        this.length = length;
      }

      @Override
      public int hashCode() {
        return Bytes.hashCode(container, offset, length);
      }

      @Override
      public boolean equals(Object other) {
        if (!(other instanceof Node)) {
          return false;
        }

        Node casted = (Node) other;
        return Bytes.equals(container, offset, length, casted.container,
            casted.offset, casted.length);
      }
    }
  }
}