From: Wolfgang M. M. <wol...@us...> - 2004-08-12 18:54:30
|
Update of /cvsroot/exist/eXist-1.0/src/org/exist/util/hashtable
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv11331/src/org/exist/util/hashtable

Modified Files:
	Long2ObjectHashMap.java
Added Files:
	SequencedLongHashMap.java
Log Message:
Switched back to simple LRU page replacement strategy for document data in
dom.dbx. As every page is accessed at most two times during storage, the
LRDCache did not work very well. Improved the LRU implementation.

--- NEW FILE: SequencedLongHashMap.java ---
/*
 * eXist Open Source Native XML Database
 * Copyright (C) 2000-04, Wolfgang M. Meier (wol...@ex...)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Library General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 *
 * $Id: SequencedLongHashMap.java,v 1.1 2004/08/12 18:54:20 wolfgang_m Exp $
 */
package org.exist.util.hashtable;

/**
 * A hash map additionally providing access to entries in the order in which
 * they were added. All entries are kept in a linked list.
 *
 * If a duplicate entry is added, the old entry is removed from the list and
 * appended to the end. The map thus implements a "Least Recently Used"
 * behaviour.
*/ import java.util.Iterator; public class SequencedLongHashMap extends Long2ObjectHashMap { private static class Entry { long key; Object value; Entry next = null; Entry prev = null; public Entry(long key, Object value) { this.key = key; this.value = value; } } private Entry first = null; private Entry last = null; public SequencedLongHashMap() { super(); } public SequencedLongHashMap(int iSize) { super(iSize); } public void put(long key, Object value) { Entry entry = new Entry(key, value); Entry duplicate = null; try { duplicate = (Entry)insert(key, entry); } catch (HashtableOverflowException e) { long[] copyKeys = keys; Object[] copyValues = values; // enlarge the table with a prime value tabSize = (int) nextPrime(tabSize + tabSize / 2); keys = new long[tabSize]; values = new Object[tabSize]; items = 0; try { for (int k = 0; k < copyValues.length; k++) { if (copyValues[k] != null && copyValues[k] != REMOVED) insert(copyKeys[k], copyValues[k]); } duplicate = (Entry)insert(key, entry); } catch (HashtableOverflowException e1) { } } if(duplicate != null) removeEntry(duplicate); if(first == null) { first = entry; last = first; } else { last.next = entry; entry.prev = last; last = entry; } } public Object get(long key) { Entry entry = (Entry) super.get(key); return entry == null ? null : entry.value; } public Object remove(long key) { Entry entry = (Entry) super.remove(key); if(entry != null) { removeEntry(entry); return entry.value; } else return null; } /** * Remove the first entry added to the map. 
* * @return */ public Object removeFirst() { if(first == null) return null; super.remove(first.key); Entry head = first; first = head.next; if(head != null) head.prev = null; return head.value; } private void removeEntry(Entry entry) { if(entry.prev == null) { if(entry.next == null) { first = null; last = null; } else { entry.next.prev = null; first = entry.next; } } else { entry.prev.next = entry.next; if(entry.next == null) last = entry.prev; else entry.next.prev = entry.prev; } } /** * Returns an iterator over all entries in the * order in which they were inserted. */ public Iterator iterator() { return new SequencedLongIterator(Long2ObjectIterator.KEYS); } public Iterator valueIterator() { return new SequencedLongIterator(Long2ObjectIterator.VALUES); } protected class SequencedLongIterator extends HashtableIterator { private Entry current; public SequencedLongIterator(int type) { super(type); current = first; } /* (non-Javadoc) * @see java.util.Iterator#hasNext() */ public boolean hasNext() { return current != null; } /* (non-Javadoc) * @see org.exist.util.hashtable.Long2ObjectHashMap.Long2ObjectIterator#next() */ public Object next() { if(current == null) return null; Entry next = current; current = current.next; if(returnType == VALUES) { return next.value; } else return new Long(next.key); } } } Index: Long2ObjectHashMap.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/util/hashtable/Long2ObjectHashMap.java,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** Long2ObjectHashMap.java 12 Jul 2004 17:17:43 -0000 1.3 --- Long2ObjectHashMap.java 12 Aug 2004 18:54:20 -0000 1.4 *************** *** 140,144 **** } ! protected void insert(long key, Object value) throws HashtableOverflowException { if (value == null) throw new IllegalArgumentException("Illegal value: null"); --- 140,144 ---- } ! 
protected Object insert(long key, Object value) throws HashtableOverflowException { if (value == null) throw new IllegalArgumentException("Illegal value: null"); *************** *** 152,156 **** values[idx] = value; ++items; ! return; } else if (values[idx] == REMOVED) { // remember the bucket, but continue to check --- 152,156 ---- values[idx] = value; ++items; ! return null; } else if (values[idx] == REMOVED) { // remember the bucket, but continue to check *************** *** 159,164 **** } else if (keys[idx] == key) { // duplicate value values[idx] = value; ! return; } int rehashVal = rehash(idx); --- 159,165 ---- } else if (keys[idx] == key) { // duplicate value + Object dup = values[idx]; values[idx] = value; ! return dup; } int rehashVal = rehash(idx); *************** *** 176,184 **** values[idx] = value; ++items; ! return; } else if(keys[idx] == key) { // duplicate value values[idx] = value; ! return; } ++rehashCnt; --- 177,186 ---- values[idx] = value; ++items; ! return null; } else if(keys[idx] == key) { // duplicate value + Object dup = values[idx]; values[idx] = value; ! return dup; } ++rehashCnt; *************** *** 190,194 **** values[bucket] = value; ++items; ! return; } throw new HashtableOverflowException(); --- 192,196 ---- values[bucket] = value; ++items; ! return null; } throw new HashtableOverflowException(); |