From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:53
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/storage/cache Modified Files: Cacheable.java LRUCache.java GClockCache.java ClockCache.java Cache.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: Cacheable.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/Cacheable.java,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** Cacheable.java 4 Jun 2004 09:47:24 -0000 1.4 --- Cacheable.java 8 Jun 2004 08:16:07 -0000 1.5 *************** *** 107,109 **** --- 107,111 ---- */ public boolean allowUnload(); + + public boolean isDirty(); } Index: GClockCache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/GClockCache.java,v retrieving revision 1.11 retrieving revision 1.12 diff -C2 -d -r1.11 -r1.12 *** GClockCache.java 4 Jun 2004 09:47:24 -0000 1.11 --- GClockCache.java 8 Jun 2004 08:16:07 -0000 1.12 *************** *** 108,111 **** --- 108,119 ---- } + public boolean hasDirtyItems() { + for(int i = 0; i < count; i++) { + if(items[i] != null && items[i].isDirty()) + return true; + } + return false; + } + protected Cacheable removeOne(Cacheable item) { Cacheable old = null; Index: Cache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/Cache.java,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** Cache.java 17 Nov 2003 09:27:31 -0000 1.1 --- Cache.java 8 Jun 2004 08:16:07 -0000 1.2 *************** *** 73,76 **** --- 73,78 ---- public void remove(Cacheable item); + public boolean hasDirtyItems(); + /** * Call release on all items, but without Index: ClockCache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/ClockCache.java,v retrieving revision 1.15 retrieving revision 1.16 diff -C2 -d -r1.15 -r1.16 *** ClockCache.java 4 Jun 2004 09:47:24 -0000 1.15 --- ClockCache.java 8 Jun 2004 08:16:07 -0000 1.16 *************** *** 152,155 **** --- 152,163 ---- } + public boolean hasDirtyItems() { + for(int i = 0; i < count; i++) { + if(items[i] != null && items[i].isDirty()) + return true; + } + return false; + } + /* (non-Javadoc) * @see org.exist.storage.cache.Cache#getBuffers() Index: LRUCache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/LRUCache.java,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** LRUCache.java 12 Dec 2003 13:20:02 -0000 1.3 --- LRUCache.java 8 Jun 2004 08:16:07 -0000 1.4 *************** *** 101,104 **** --- 101,118 ---- } + + /* (non-Javadoc) + * @see org.exist.storage.cache.Cache#hasDirtyItems() + */ + public boolean hasDirtyItems() { + 
Cacheable next; + for(Iterator i = stack.iterator(); i.hasNext(); ) { + next = (Cacheable)i.next(); + if(next.isDirty()) + return true; + } + return false; + } + /* (non-Javadoc) * @see org.exist.storage.cache.Cache#getBuffers() |
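The commit above adds isDirty() to the Cacheable interface and a hasDirtyItems() scan to each cache implementation. The following is a minimal illustrative sketch, not eXist code (the class names and everything except isDirty(), sync() and hasDirtyItems() are hypothetical), of the dirty-tracking pattern those additions assume: pages flag themselves when modified, sync() writes back only when needed, and the cache can report cheaply whether a flush is worthwhile.

    // Illustrative sketch only -- the real Cacheable interface has additional
    // methods (reference counting, timestamps, allowUnload()) omitted here.
    class DirtyTrackedPage {
        private byte[] data;
        private boolean dirty = false;

        void modify(byte[] newData) {
            data = newData;
            dirty = true;              // mark a pending write
        }

        boolean isDirty() {
            return dirty;
        }

        // Write the page back only if it has unsaved changes; the return value
        // lets a cache count how many pages were actually written.
        boolean sync() {
            if (!dirty)
                return false;
            // writeToDisk(data);      // real I/O omitted in this sketch
            dirty = false;
            return true;
        }
    }

    class DirtyAwareCache {
        private final DirtyTrackedPage[] items;
        private final int count;

        DirtyAwareCache(int size) {
            items = new DirtyTrackedPage[size];
            count = size;
        }

        // Mirrors the hasDirtyItems() added to GClockCache, ClockCache and
        // LRUCache: a linear scan that lets callers skip the write lock and the
        // flush entirely when nothing needs to be written.
        boolean hasDirtyItems() {
            for (int i = 0; i < count; i++) {
                if (items[i] != null && items[i].isDirty())
                    return true;
            }
            return false;
        }
    }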
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:53
Update of /cvsroot/exist/eXist-1.0/src/org/exist/soap In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/soap Modified Files: AdminSoapBindingImpl.java QuerySoapBindingImpl.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: QuerySoapBindingImpl.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/soap/QuerySoapBindingImpl.java,v retrieving revision 1.20 retrieving revision 1.21 diff -C2 -d -r1.20 -r1.21 *** QuerySoapBindingImpl.java 1 Apr 2004 14:12:46 -0000 1.20 --- QuerySoapBindingImpl.java 8 Jun 2004 08:16:10 -0000 1.21 *************** *** 389,395 **** ArraySet hitsByDoc = new ArraySet(50); NodeProxy p; for (Iterator i = ((NodeSet) resultSet).iterator(); i.hasNext();) { p = (NodeProxy) i.next(); ! if (p.doc.getFileName().equals(docPath)) hitsByDoc.add(p); } --- 389,397 ---- ArraySet hitsByDoc = new ArraySet(50); NodeProxy p; + String path; for (Iterator i = ((NodeSet) resultSet).iterator(); i.hasNext();) { p = (NodeProxy) i.next(); ! path = p.doc.getCollection().getName() + '/' + p.doc.getFileName(); ! if (path.equals(docPath)) hitsByDoc.add(p); } Index: AdminSoapBindingImpl.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/soap/AdminSoapBindingImpl.java,v retrieving revision 1.13 retrieving revision 1.14 diff -C2 -d -r1.13 -r1.14 *** AdminSoapBindingImpl.java 25 May 2004 09:26:07 -0000 1.13 --- AdminSoapBindingImpl.java 8 Jun 2004 08:16:10 -0000 1.14 *************** *** 122,126 **** throw new EXistException( "Collection " + collectionName + " not found"); ! DocumentImpl doc = collection.getDocument(path); if(doc == null) throw new EXistException("Document " + docName + " not found"); --- 122,126 ---- throw new EXistException( "Collection " + collectionName + " not found"); ! DocumentImpl doc = collection.getDocument(broker, docName); if(doc == null) throw new EXistException("Document " + docName + " not found"); *************** *** 158,162 **** throw new EXistException("Collection " + collectionName + " not found"); if(!replace) { ! DocumentImpl old = collection.getDocument(path); if(old != null) throw new RemoteException("Document exists and overwrite is not allowed"); --- 158,162 ---- throw new EXistException("Collection " + collectionName + " not found"); if(!replace) { ! DocumentImpl old = collection.getDocument(broker, path); if(old != null) throw new RemoteException("Document exists and overwrite is not allowed"); |
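With this change a document's file name no longer carries the collection path, so callers that need the full resource path rebuild it from the owning collection, as the new code in QuerySoapBindingImpl does. A tiny hypothetical helper, not part of the commit, capturing that pattern:

    // Hypothetical convenience method, shown only to illustrate the pattern;
    // it relies on DocumentImpl.getCollection() and getFileName() as used above.
    static String fullPath(DocumentImpl doc) {
        return doc.getCollection().getName() + '/' + doc.getFileName();
    }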
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:53
Update of /cvsroot/exist/eXist-1.0/src/org/dbxml/core/filer In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/dbxml/core/filer Modified Files: BTree.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: BTree.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/dbxml/core/filer/BTree.java,v retrieving revision 1.26 retrieving revision 1.27 diff -C2 -d -r1.26 -r1.27 *** BTree.java 4 Jun 2004 09:42:19 -0000 1.26 --- BTree.java 8 Jun 2004 08:16:09 -0000 1.27 *************** *** 156,167 **** Runnable syncAction = new Runnable() { public void run() { ! try { ! // LOG.debug("Triggering cache sync for " + getFile().getName()); ! lock.acquire(Lock.WRITE_LOCK); ! cache.flush(); ! } catch (LockException e) { ! LOG.warn("Failed to acquire lock on dom.dbx"); ! } finally { ! lock.release(); } } --- 156,169 ---- Runnable syncAction = new Runnable() { public void run() { ! if(cache.hasDirtyItems()) { ! try { ! // LOG.debug("Triggering cache sync for " + getFile().getName()); ! lock.acquire(Lock.WRITE_LOCK); ! cache.flush(); ! } catch (LockException e) { ! LOG.warn("Failed to acquire lock on dom.dbx"); ! } finally { ! lock.release(); ! } } } |
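The sync action registered by BTree now tests cache.hasDirtyItems() before taking the write lock, so an idle file no longer blocks readers and writers just to discover there is nothing to flush. Below is a standalone sketch of that guarded periodic flush; eXist schedules the task through its own SyncDaemon, and java.util.concurrent.ScheduledExecutorService plus a simplified cache interface are used here purely as stand-ins.

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class PeriodicFlushSketch {

        // Simplified stand-in for the eXist Cache interface.
        interface SimpleCache {
            boolean hasDirtyItems();
            void flush();
        }

        static void schedule(final SimpleCache cache,
                             final ReentrantReadWriteLock lock,
                             long periodMillis) {
            final Runnable syncAction = new Runnable() {
                public void run() {
                    // Cheap check first: skip the write lock entirely when
                    // there is nothing to write back, like the guard above.
                    if (!cache.hasDirtyItems())
                        return;
                    lock.writeLock().lock();
                    try {
                        cache.flush();
                    } finally {
                        lock.writeLock().unlock();
                    }
                }
            };
            // Stand-in for pool.getSyncDaemon().executePeriodically(...).
            ScheduledExecutorService daemon =
                    Executors.newSingleThreadScheduledExecutor();
            daemon.scheduleAtFixedRate(syncAction, periodMillis, periodMillis,
                    TimeUnit.MILLISECONDS);
        }
    }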
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:53
Update of /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/xquery/functions Modified Files: FunDocumentURI.java FunLang.java FunId.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: FunId.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/FunId.java,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** FunId.java 25 Mar 2004 13:17:43 -0000 1.3 --- FunId.java 8 Jun 2004 08:16:09 -0000 1.4 *************** *** 89,93 **** QName id) { NodeSet attribs = ! (NodeSet) context.getBroker().findElementsByTagName(ElementValue.ATTRIBUTE_ID, docs, id, null); NodeProxy n, p; for (Iterator i = attribs.iterator(); i.hasNext();) { --- 89,93 ---- QName id) { NodeSet attribs = ! (NodeSet) context.getBroker().getElementIndex().findElementsByTagName(ElementValue.ATTRIBUTE_ID, docs, id, null); NodeProxy n, p; for (Iterator i = attribs.iterator(); i.hasNext();) { Index: FunLang.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/FunLang.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** FunLang.java 28 May 2004 10:54:09 -0000 1.2 --- FunLang.java 8 Jun 2004 08:16:09 -0000 1.3 *************** *** 73,77 **** .getStringValue(); QName qname = new QName("lang", context.getURIForPrefix("xml"), "xml"); ! NodeSet attribs = context.getBroker().getAttributesByName(contextSequence.toNodeSet().getDocumentSet(), qname); NodeSet temp = new ExtArrayNodeSet(); NodeProxy p; --- 73,77 ---- .getStringValue(); QName qname = new QName("lang", context.getURIForPrefix("xml"), "xml"); ! NodeSet attribs = context.getBroker().getElementIndex().getAttributesByName(contextSequence.toNodeSet().getDocumentSet(), qname); NodeSet temp = new ExtArrayNodeSet(); NodeProxy p; Index: FunDocumentURI.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/FunDocumentURI.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** FunDocumentURI.java 28 May 2004 10:54:09 -0000 1.2 --- FunDocumentURI.java 8 Jun 2004 08:16:09 -0000 1.3 *************** *** 67,71 **** Sequence s = arg.eval(contextSequence, contextItem); NodeProxy node = (NodeProxy) s.itemAt(0); ! return new StringValue(node.doc.getFileName()); } --- 67,72 ---- Sequence s = arg.eval(contextSequence, contextItem); NodeProxy node = (NodeProxy) s.itemAt(0); ! String path = node.doc.getCollection().getName() + '/' +node.doc.getFileName(); ! return new StringValue(path); } |
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:26
Update of /cvsroot/exist/eXist-1.0/src/org/exist/security In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/security Modified Files: SecurityManager.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: SecurityManager.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/security/SecurityManager.java,v retrieving revision 1.18 retrieving revision 1.19 diff -C2 -d -r1.18 -r1.19 *** SecurityManager.java 27 Apr 2004 15:46:59 -0000 1.18 --- SecurityManager.java 8 Jun 2004 08:16:15 -0000 1.19 *************** *** 92,96 **** sysCollection.setPermissions(0770); } ! Document acl = sysCollection.getDocument(SYSTEM + '/' + ACL_FILE); Element docElement = null; if (acl != null) --- 92,96 ---- sysCollection.setPermissions(0770); } ! Document acl = sysCollection.getDocument(broker, ACL_FILE); Element docElement = null; if (acl != null) |
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:26
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage/store In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/storage/store Modified Files: BFile.java DOMFile.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: DOMFile.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/store/DOMFile.java,v retrieving revision 1.30 retrieving revision 1.31 diff -C2 -d -r1.30 -r1.31 *** DOMFile.java 4 Jun 2004 09:47:25 -0000 1.30 --- DOMFile.java 8 Jun 2004 08:16:15 -0000 1.31 *************** *** 132,143 **** Runnable syncAction = new Runnable() { public void run() { ! try { ! // LOG.debug("Triggering cache sync"); ! lock.acquire(Lock.WRITE_LOCK); ! dataCache.flush(); ! } catch (LockException e) { ! LOG.warn("Failed to acquire lock on dom.dbx"); ! } finally { ! lock.release(); } } --- 132,144 ---- Runnable syncAction = new Runnable() { public void run() { ! if(dataCache.hasDirtyItems()) { ! try { ! lock.acquire(Lock.WRITE_LOCK); ! dataCache.flush(); ! } catch (LockException e) { ! LOG.warn("Failed to acquire lock on dom.dbx"); ! } finally { ! lock.release(); ! } } } Index: BFile.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/store/BFile.java,v retrieving revision 1.25 retrieving revision 1.26 diff -C2 -d -r1.25 -r1.26 *** BFile.java 4 Jun 2004 09:47:25 -0000 1.25 --- BFile.java 8 Jun 2004 08:16:15 -0000 1.26 *************** *** 116,127 **** Runnable syncAction = new Runnable() { public void run() { ! try { ! // LOG.debug("Triggering cache sync for " + getFile().getName()); ! lock.acquire(Lock.WRITE_LOCK); ! dataCache.flush(); ! } catch (LockException e) { ! LOG.warn("Failed to acquire lock on dom.dbx"); ! } finally { ! lock.release(); } } --- 116,128 ---- Runnable syncAction = new Runnable() { public void run() { ! if(dataCache.hasDirtyItems()) { ! try { ! lock.acquire(Lock.WRITE_LOCK); ! dataCache.flush(); ! } catch (LockException e) { ! LOG.warn("Failed to acquire lock on dom.dbx"); ! } finally { ! lock.release(); ! } } } |
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:24
Update of /cvsroot/exist/eXist-1.0/src/org/exist/collections In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/collections Modified Files: Collection.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: Collection.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/collections/Collection.java,v retrieving revision 1.27 retrieving revision 1.28 diff -C2 -d -r1.27 -r1.28 *** Collection.java 4 Jun 2004 09:42:45 -0000 1.27 --- Collection.java 8 Jun 2004 08:16:14 -0000 1.28 *************** *** 64,67 **** --- 64,68 ---- import org.exist.util.Lock; import org.exist.util.LockException; + import org.exist.util.ReentrantReadWriteLock; import org.exist.util.SyntaxException; import org.exist.util.hashtable.ObjectHashSet; *************** *** 102,105 **** --- 103,108 ---- private Map documents = new TreeMap(); + private boolean reloadRequired = false; + // the name of this collection private String name; *************** *** 131,137 **** --- 134,143 ---- private CollectionStore db; + private Lock lock = null; + public Collection(CollectionStore db, String name) { this.name = name; this.db = db; + lock = new ReentrantReadWriteLock(name); } *************** *** 175,178 **** --- 181,206 ---- } + /** + * Adds a document to the collection, but doesn't keep the document + * object in memory. The collection will be reloaded the first time the + * new document is accessed. Using this method helps to keep memory + * consumption low when loading many documents in a batch. + * + * @param broker + * @param doc + */ + public void addDocumentLink(DBBroker broker, DocumentImpl doc) { + if (doc.getDocId() < 0) + doc.setDocId(broker.getNextDocId(this)); + documents.put(doc.getFileName(), null); + reloadRequired = true; + } + + /** + * Removes the document from the internal list of resources, but + * doesn't delete the document object itself. 
+ * + * @param doc + */ public void unlinkDocument(DocumentImpl doc) { documents.remove(doc.getFileName()); *************** *** 188,192 **** */ public Iterator collectionIterator() { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 216,219 ---- *************** *** 208,212 **** public List getDescendants(DBBroker broker, User user) { final ArrayList cl = new ArrayList(subcollections.size()); - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 235,238 ---- *************** *** 251,255 **** private DocumentSet allDocs(DBBroker broker, DocumentSet docs) { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 277,280 ---- *************** *** 282,288 **** */ public DocumentSet getDocuments(DBBroker broker, DocumentSet docs) { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); docs.addCollection(this); docs.addAll(broker, documents.values()); --- 307,316 ---- */ public DocumentSet getDocuments(DBBroker broker, DocumentSet docs) { try { lock.acquire(Lock.READ_LOCK); + if(reloadRequired) { + broker.reloadCollection(this); + reloadRequired = false; + } docs.addCollection(this); docs.addAll(broker, documents.values()); *************** *** 335,339 **** */ public int getChildCollectionCount() { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 363,366 ---- *************** *** 348,361 **** /** ! * Get a child-document. ! * ! *@param name Description of the Parameter ! *@return The document value */ ! public DocumentImpl getDocument(String name) { ! Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); ! return (DocumentImpl) documents.get(name); } catch (LockException e) { LOG.warn(e.getMessage(), e); --- 375,396 ---- /** ! * Get a child resource as identified by path. This method doesn't put ! * a lock on the document nor does it recognize locks held by other threads. ! * There's no guarantee that the document still exists when accessing it. ! * ! *@param name The name of the document (without collection path) ! *@return the document */ ! public DocumentImpl getDocument(DBBroker broker, String name) { try { lock.acquire(Lock.READ_LOCK); ! if(reloadRequired) { ! broker.reloadCollection(this); ! reloadRequired = false; ! } ! DocumentImpl doc = (DocumentImpl) documents.get(name); ! if(doc == null) ! LOG.debug("Document " + name + " not found!"); ! return doc; } catch (LockException e) { LOG.warn(e.getMessage(), e); *************** *** 365,370 **** --- 400,442 ---- } } + + /** + * Retrieve a child resource after putting a read lock on it. With this method, + * access to the received document object is safe. + * + * @param broker + * @param name + * @return + * @throws LockException + */ + public DocumentImpl getDocumentWithLock(DBBroker broker, String name) + throws LockException { + try { + lock.acquire(Lock.READ_LOCK); + if(reloadRequired) { + broker.reloadCollection(this); + reloadRequired = false; + } + DocumentImpl doc = (DocumentImpl) documents.get(name); + Lock updateLock = doc.getUpdateLock(); + updateLock.acquire(Lock.READ_LOCK); + return doc; + } finally { + lock.release(); + } + } /** + * Release any locks held on the document. + * + * @param doc + */ + public void releaseDocument(DocumentImpl doc) { + if(doc != null) { + doc.getUpdateLock().release(Lock.READ_LOCK); + } + } + + /** * Returns the number of documents in this collection. 
* *************** *** 372,376 **** */ public int getDocumentCount() { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 444,447 ---- *************** *** 422,426 **** */ public Permission getPermissions() { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 493,496 ---- *************** *** 441,445 **** */ public boolean hasDocument(String name) { ! return getDocument(name) != null; } --- 511,515 ---- */ public boolean hasDocument(String name) { ! return documents.containsKey(name); } *************** *** 451,455 **** */ public boolean hasSubcollection(String name) { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 521,524 ---- *************** *** 468,475 **** *@return */ - public Iterator iterator() { - return getDocuments(null, new DocumentSet()).iterator(); - } - public Iterator iterator(DBBroker broker) { return getDocuments(broker, new DocumentSet()).iterator(); --- 537,540 ---- *************** *** 538,542 **** */ public void removeCollection(String name) throws LockException { - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); --- 603,606 ---- *************** *** 554,558 **** public void removeDocument(DBBroker broker, String docname) throws PermissionDeniedException, TriggerException, LockException { - Lock lock = db.getLock(); try { lock.acquire(Lock.READ_LOCK); --- 618,621 ---- *************** *** 567,572 **** } else configuration = null; ! String path = getName() + '/' + docname; ! DocumentImpl doc = getDocument(path); if (doc == null) return; --- 630,634 ---- } else configuration = null; ! DocumentImpl doc = getDocument(broker, docname); if (doc == null) return; *************** *** 583,588 **** doc); } ! broker.removeDocument(path); ! documents.remove(path); broker.saveCollection(this); } finally { --- 645,650 ---- doc); } ! broker.removeDocument(getName() + '/' + docname); ! documents.remove(docname); broker.saveCollection(this); } finally { *************** *** 593,601 **** public void removeBinaryResource(DBBroker broker, String docname) throws PermissionDeniedException, LockException { - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); ! String path = getName() + '/' + docname; ! DocumentImpl doc = getDocument(path); if(doc.isLockedForWrite()) throw new PermissionDeniedException("Document " + doc.getFileName() + --- 655,661 ---- public void removeBinaryResource(DBBroker broker, String docname) throws PermissionDeniedException, LockException { try { lock.acquire(Lock.WRITE_LOCK); ! DocumentImpl doc = getDocument(broker, docname); if(doc.isLockedForWrite()) throw new PermissionDeniedException("Document " + doc.getFileName() + *************** *** 622,626 **** throw new PermissionDeniedException("Document " + doc.getFileName() + " is locked for write"); - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); --- 682,685 ---- *************** *** 645,657 **** if (broker.isReadOnly()) throw new PermissionDeniedException("Database is read-only"); DocumentImpl document, oldDoc = null; XMLReader reader; InputSource source; - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); ! oldDoc = getDocument(getName() + '/' + name); ! ! if (oldDoc != null) { if(oldDoc.isLockedForWrite()) throw new PermissionDeniedException("Document " + name + --- 704,714 ---- if (broker.isReadOnly()) throw new PermissionDeniedException("Database is read-only"); + System.out.println(this); DocumentImpl document, oldDoc = null; XMLReader reader; InputSource source; try { lock.acquire(Lock.WRITE_LOCK); ! 
if (hasDocument(name) && (oldDoc = getDocument(broker, name)) != null) { if(oldDoc.isLockedForWrite()) throw new PermissionDeniedException("Document " + name + *************** *** 662,665 **** --- 719,726 ---- throw new PermissionDeniedException("The document is locked by user " + lockUser.getName()); + // check if the document is currently being changed by someone else + Lock oldLock = oldDoc.getUpdateLock(); + oldLock.acquire(Lock.WRITE_LOCK); + // do we have permissions for update? if (!oldDoc.getPermissions().validate(broker.getUser(), *************** *** 679,690 **** // document name if (oldDoc != null) { ! document = new DocumentImpl(broker, getName() + "/__" + name, ! this); document.setCreated(oldDoc.getCreated()); document.setLastModified(System.currentTimeMillis()); document.setPermissions(oldDoc.getPermissions()); } else { ! document = new DocumentImpl(broker, getName() + '/' + name, ! this); document.setCreated(System.currentTimeMillis()); document.getPermissions().setOwner(broker.getUser()); --- 740,749 ---- // document name if (oldDoc != null) { ! document = new DocumentImpl(broker, name, this); document.setCreated(oldDoc.getCreated()); document.setLastModified(System.currentTimeMillis()); document.setPermissions(oldDoc.getPermissions()); } else { ! document = new DocumentImpl(broker, name, this); document.setCreated(System.currentTimeMillis()); document.getPermissions().setOwner(broker.getUser()); *************** *** 757,761 **** document.setMaxDepth(document.getMaxDepth() + 1); document.calculateTreeLevelStartPoints(); ! // new document is valid: remove old document if (oldDoc != null) { LOG.debug("removing old document " + oldDoc.getFileName()); --- 816,821 ---- document.setMaxDepth(document.getMaxDepth() + 1); document.calculateTreeLevelStartPoints(); ! // new document is valid: remove old document ! if (oldDoc != null) { LOG.debug("removing old document " + oldDoc.getFileName()); *************** *** 763,771 **** broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(oldDoc.getFileName()); ! document.setFileName(oldDoc.getFileName()); } - document.getUpdateLock().acquire(Lock.WRITE_LOCK); - addDocument(broker, document); indexer.setValidating(false); if (trigger != null) --- 823,835 ---- broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(getName() + '/' + oldDoc.getFileName(), false); ! // we continue to use the old document object and just replace its contents ! oldDoc.copyOf(document); ! indexer.setDocumentObject(oldDoc); ! document = oldDoc; ! } else { ! document.getUpdateLock().acquire(Lock.WRITE_LOCK); ! document.setDocId(broker.getNextDocId(this)); } indexer.setValidating(false); if (trigger != null) *************** *** 786,804 **** } ! try { ! lock.acquire(Lock.WRITE_LOCK); ! broker.addDocument(this, document); ! broker.closeDocument(); ! broker.flush(); ! // if we are running in privileged mode (e.g. backup/restore) ! // notify the SecurityManager about changes ! if (document.getFileName().equals("/db/system/users.xml") ! && privileged == false) { ! // inform the security manager that system data has changed ! LOG.debug("users.xml changed"); ! broker.getBrokerPool().reloadSecurityManager(broker); ! } ! } finally { ! lock.release(); } } finally { --- 850,866 ---- } ! if(oldDoc == null) ! addDocumentLink(broker, document); ! broker.addDocument(this, document); ! broker.closeDocument(); ! broker.flush(); ! ! // if we are running in privileged mode (e.g. backup/restore) ! 
// notify the SecurityManager about changes ! if (getName().equals(SecurityManager.SYSTEM) && document.getFileName().equals(SecurityManager.ACL_FILE) ! && privileged == false) { ! // inform the security manager that system data has changed ! LOG.debug("users.xml changed"); ! broker.getBrokerPool().reloadSecurityManager(broker); } } finally { *************** *** 806,809 **** --- 868,872 ---- } broker.deleteObservers(); + System.out.println(this); return document; } *************** *** 823,836 **** DocumentImpl document, oldDoc = null; XMLReader reader; - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); ! oldDoc = getDocument(getName() + '/' + name); ! ! if (oldDoc != null) { ! // check if the document is currently being changed by someone else ! if(oldDoc.isLockedForWrite()) ! throw new PermissionDeniedException("Document " + ! name + " is already locked for write by a different process"); // check if the document is locked by another user User lockUser = oldDoc.getUserLock(); --- 886,892 ---- DocumentImpl document, oldDoc = null; XMLReader reader; try { lock.acquire(Lock.WRITE_LOCK); ! if (hasDocument(name) && (oldDoc = getDocument(broker, name)) != null) { // check if the document is locked by another user User lockUser = oldDoc.getUserLock(); *************** *** 838,841 **** --- 894,902 ---- throw new PermissionDeniedException("The document is locked by user " + lockUser.getName()); + + // check if the document is currently being changed by someone else + Lock oldLock = oldDoc.getUpdateLock(); + oldLock.acquire(Lock.WRITE_LOCK); + // do we have permissions for update? if (!oldDoc.getPermissions().validate(broker.getUser(), *************** *** 855,866 **** // document name if (oldDoc != null) { ! document = new DocumentImpl(broker, getName() + "/__" + name, ! this); document.setCreated(oldDoc.getCreated()); document.setLastModified(System.currentTimeMillis()); document.setPermissions(oldDoc.getPermissions()); } else { ! document = new DocumentImpl(broker, getName() + '/' + name, ! this); document.setCreated(System.currentTimeMillis()); document.getPermissions().setOwner(broker.getUser()); --- 916,925 ---- // document name if (oldDoc != null) { ! document = new DocumentImpl(broker, name, this); document.setCreated(oldDoc.getCreated()); document.setLastModified(System.currentTimeMillis()); document.setPermissions(oldDoc.getPermissions()); } else { ! document = new DocumentImpl(broker, name, this); document.setCreated(System.currentTimeMillis()); document.getPermissions().setOwner(broker.getUser()); *************** *** 939,947 **** broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(oldDoc.getFileName()); ! document.setFileName(oldDoc.getFileName()); } - document.getUpdateLock().acquire(Lock.WRITE_LOCK); - addDocument(broker, document); parser.setValidating(false); --- 998,1009 ---- broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(getName() + '/' + oldDoc.getFileName(), false); ! oldDoc.copyOf(document); ! parser.setDocumentObject(oldDoc); ! document = oldDoc; ! } else { ! document.getUpdateLock().acquire(Lock.WRITE_LOCK); ! document.setDocId(broker.getNextDocId(this)); } parser.setValidating(false); *************** *** 975,993 **** } ! try { ! lock.acquire(Lock.WRITE_LOCK); ! broker.addDocument(this, document); ! broker.closeDocument(); ! broker.flush(); ! // if we are running in privileged mode (e.g. backup/restore) ! // notify the SecurityManager about changes ! 
if (document.getFileName().equals("/db/system/users.xml") ! && privileged == false) { ! // inform the security manager that system data has changed ! LOG.debug("users.xml changed"); ! broker.getBrokerPool().reloadSecurityManager(broker); ! } ! } finally { ! lock.release(); } } finally { --- 1037,1053 ---- } ! if(oldDoc == null) ! addDocumentLink(broker, document); ! broker.addDocument(this, document); ! broker.closeDocument(); ! broker.flush(); ! ! // if we are running in privileged mode (e.g. backup/restore) ! // notify the SecurityManager about changes ! if (getName().equals(SecurityManager.SYSTEM) && document.getFileName().equals(SecurityManager.ACL_FILE) ! && privileged == false) { ! // inform the security manager that system data has changed ! LOG.debug("users.xml changed"); ! broker.getBrokerPool().reloadSecurityManager(broker); } } finally { *************** *** 1012,1023 **** DocumentImpl document, oldDoc = null; DOMStreamer streamer; - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); ! oldDoc = getDocument(getName() + '/' + name); ! if (oldDoc != null) { ! if(oldDoc.isLockedForWrite()) ! throw new PermissionDeniedException("Document " + name + ! "is already locked for write"); // check if the document is locked by another user User lockUser = oldDoc.getUserLock(); --- 1072,1078 ---- DocumentImpl document, oldDoc = null; DOMStreamer streamer; try { lock.acquire(Lock.WRITE_LOCK); ! if (hasDocument(name) && (oldDoc = getDocument(broker, name)) != null) { // check if the document is locked by another user User lockUser = oldDoc.getUserLock(); *************** *** 1025,1028 **** --- 1080,1087 ---- throw new PermissionDeniedException("The document is locked by user " + lockUser.getName()); + + // check if the document is currently being changed by someone else + oldDoc.getUpdateLock().acquire(Lock.WRITE_LOCK); + // do we have permissions for update? if (!oldDoc.getPermissions().validate(broker.getUser(), *************** *** 1042,1046 **** // document name if (oldDoc != null) { ! document = new DocumentImpl(broker, getName() + "/__" + name, this); document.setCreated(oldDoc.getCreated()); --- 1101,1105 ---- // document name if (oldDoc != null) { ! document = new DocumentImpl(broker, name, this); document.setCreated(oldDoc.getCreated()); *************** *** 1048,1052 **** document.setPermissions(oldDoc.getPermissions()); } else { ! document = new DocumentImpl(broker, getName() + '/' + name, this); document.setCreated(System.currentTimeMillis()); --- 1107,1111 ---- document.setPermissions(oldDoc.getPermissions()); } else { ! document = new DocumentImpl(broker, name, this); document.setCreated(System.currentTimeMillis()); *************** *** 1111,1119 **** broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(oldDoc.getFileName()); ! document.setFileName(oldDoc.getFileName()); } - document.getUpdateLock().acquire(Lock.WRITE_LOCK); - addDocument(broker, document); parser.setValidating(false); --- 1170,1181 ---- broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(getName() + '/' + oldDoc.getFileName(), false); ! oldDoc.copyOf(document); ! parser.setDocumentObject(oldDoc); ! document = oldDoc; ! } else { ! document.getUpdateLock().acquire(Lock.WRITE_LOCK); ! document.setDocId(broker.getNextDocId(this)); } parser.setValidating(false); *************** *** 1125,1146 **** try { // second pass: store the document ! LOG.debug("storing document ..."); streamer.serialize(node, true); ! try { ! lock.acquire(Lock.WRITE_LOCK); ! 
broker.addDocument(this, document); ! broker.closeDocument(); ! broker.flush(); ! // if we are running in privileged mode (e.g. backup/restore) ! // notify the SecurityManager about changes ! if (document.getFileName().equals("/db/system/users.xml") ! && privileged == false) { ! // inform the security manager that system data has changed ! LOG.debug("users.xml changed"); ! broker.getBrokerPool().reloadSecurityManager(broker); ! } ! } finally { ! lock.release(); } } finally { --- 1187,1207 ---- try { // second pass: store the document ! if(LOG.isDebugEnabled()) ! LOG.debug("storing document " + document.getFileName()); streamer.serialize(node, true); ! if(oldDoc == null) ! addDocumentLink(broker, document); ! broker.addDocument(this, document); ! broker.closeDocument(); ! broker.flush(); ! // if we are running in privileged mode (e.g. backup/restore) ! // notify the SecurityManager about changes ! if (getName().equals(SecurityManager.SYSTEM) && document.getFileName().equals(SecurityManager.ACL_FILE) ! && privileged == false) { ! // inform the security manager that system data has changed ! if(LOG.isDebugEnabled()) ! LOG.debug("users.xml changed"); ! broker.getBrokerPool().reloadSecurityManager(broker); } } finally { *************** *** 1157,1164 **** throw new PermissionDeniedException("Database is read-only"); BinaryDocument blob = null; - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); ! DocumentImpl oldDoc = getDocument(getName() + '/' + name); if (oldDoc != null) { if(oldDoc.isLockedForWrite()) --- 1218,1224 ---- throw new PermissionDeniedException("Database is read-only"); BinaryDocument blob = null; try { lock.acquire(Lock.WRITE_LOCK); ! DocumentImpl oldDoc = getDocument(broker, name); if (oldDoc != null) { if(oldDoc.isLockedForWrite()) *************** *** 1174,1178 **** Permission.UPDATE)) throw new PermissionDeniedException( ! "document exists and update " + "is not allowed"); // no: do we have write permissions? } else if (!getPermissions().validate(broker.getUser(), --- 1234,1238 ---- Permission.UPDATE)) throw new PermissionDeniedException( ! "document exists and update is not allowed"); // no: do we have write permissions? } else if (!getPermissions().validate(broker.getUser(), *************** *** 1181,1186 **** "not allowed to write to collection " + getName()); ! blob = new BinaryDocument(broker, getName() + '/' ! + name, this); if (oldDoc != null) { blob.setCreated(oldDoc.getCreated()); --- 1241,1245 ---- "not allowed to write to collection " + getName()); ! blob = new BinaryDocument(broker, name, this); if (oldDoc != null) { blob.setCreated(oldDoc.getCreated()); *************** *** 1192,1196 **** broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(oldDoc.getFileName()); } else { blob.setCreated(System.currentTimeMillis()); --- 1251,1255 ---- broker.removeBinaryResource((BinaryDocument) oldDoc); else ! broker.removeDocument(getName() + '/' + oldDoc.getFileName()); } else { blob.setCreated(System.currentTimeMillis()); *************** *** 1200,1204 **** } broker.storeBinaryResource(blob, data); ! addDocument(broker, blob); broker.addDocument(this, blob); broker.closeDocument(); --- 1259,1263 ---- } broker.storeBinaryResource(blob, data); ! 
addDocumentLink(broker, blob); broker.addDocument(this, blob); broker.closeDocument(); *************** *** 1214,1218 **** public void setPermissions(int mode) throws LockException { - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); --- 1273,1276 ---- *************** *** 1224,1228 **** public void setPermissions(String mode) throws SyntaxException, LockException { - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); --- 1282,1285 ---- *************** *** 1239,1243 **** */ public void setPermissions(Permission permissions) throws LockException { - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); --- 1296,1299 ---- *************** *** 1277,1299 **** ostream.writeLong(created); DocumentImpl doc; ! for (Iterator i = iterator(); i.hasNext(); ) { doc = (DocumentImpl) i.next(); doc.write(ostream); } } - - public void correctResourcePaths() { - Map newMap = new TreeMap(); - DocumentImpl childDoc; - String path; - for(Iterator i = documents.values().iterator(); i.hasNext(); ) { - childDoc = (DocumentImpl)i.next(); - path = childDoc.getFileName(); - path = path.substring(path.lastIndexOf('/') + 1); - childDoc.setFileName(getName() + '/' + path); - newMap.put(childDoc.getFileName(), childDoc); - } - documents = newMap; - } private CollectionConfiguration getConfiguration(DBBroker broker) { --- 1333,1341 ---- ostream.writeLong(created); DocumentImpl doc; ! for (Iterator i = iterator(broker); i.hasNext(); ) { doc = (DocumentImpl) i.next(); doc.write(ostream); } } private CollectionConfiguration getConfiguration(DBBroker broker) { *************** *** 1304,1309 **** private CollectionConfiguration readCollectionConfiguration(DBBroker broker) { ! DocumentImpl doc = getDocument(getName() + '/' + COLLECTION_CONFIG_FILE); ! if (doc != null) { LOG.debug("found collection.xconf"); triggersEnabled = false; --- 1346,1355 ---- private CollectionConfiguration readCollectionConfiguration(DBBroker broker) { ! if (hasDocument(COLLECTION_CONFIG_FILE)) { ! DocumentImpl doc = getDocument(broker, COLLECTION_CONFIG_FILE); ! if(doc == null) { ! LOG.warn("collection.xconf exists but could not be loaded"); ! return null; ! } LOG.debug("found collection.xconf"); triggersEnabled = false; *************** *** 1341,1345 **** public void setTriggersEnabled(boolean enabled) { - Lock lock = db.getLock(); try { lock.acquire(Lock.WRITE_LOCK); --- 1387,1390 ---- *************** *** 1511,1513 **** --- 1556,1578 ---- return false; } + + + /* (non-Javadoc) + * @see org.exist.storage.cache.Cacheable#isDirty() + */ + public boolean isDirty() { + return false; + } + + public String toString() { + StringBuffer buf = new StringBuffer(); + buf.append("["); + for(Iterator i = documents.keySet().iterator(); i.hasNext(); ) { + buf.append(i.next()); + if(i.hasNext()) + buf.append(", "); + } + buf.append("]"); + return buf.toString(); + } } |
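The new getDocumentWithLock()/releaseDocument() pair is the intended replacement for callers that previously fetched a document and read it with no protection against the replace-in-place path above. A hypothetical caller-side sketch follows; the resource name and the broker and collection variables are assumptions, not from the commit.

    DocumentImpl doc = null;
    try {
        // acquires the collection lock briefly, then a READ_LOCK on the document
        doc = collection.getDocumentWithLock(broker, "sample.xml");
        if (doc != null) {
            // safe to read while the document read lock is held
            LOG.debug("found resource " + doc.getFileName());
        }
    } catch (LockException e) {
        LOG.warn("could not acquire read lock on sample.xml", e);
    } finally {
        // releaseDocument() is null-safe and drops the read lock taken above
        collection.releaseDocument(doc);
    }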
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:23
Update of /cvsroot/exist/eXist-1.0/src/org/exist/util In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist/util Modified Files: ReentrantReadWriteLock.java MultiReadReentrantLock.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: MultiReadReentrantLock.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/util/MultiReadReentrantLock.java,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** MultiReadReentrantLock.java 2 Jun 2004 11:34:34 -0000 1.1 --- MultiReadReentrantLock.java 8 Jun 2004 08:16:14 -0000 1.2 *************** *** 74,78 **** * ensure that write locks are issued in the same order they are requested. */ ! private List waitingForWriteLock = new ArrayList(5); /** Default constructor. */ --- 74,78 ---- * ensure that write locks are issued in the same order they are requested. */ ! private List waitingForWriteLock = null; /** Default constructor. */ *************** *** 141,144 **** --- 141,146 ---- // log.debug("nested write lock: " + outstandingWriteLocks); // } + if(waitingForWriteLock == null) + waitingForWriteLock = new ArrayList(3); waitingForWriteLock.add(thisThread); } *************** *** 198,202 **** // could pull out of sub if block to get nested tracking working. ! if (outstandingReadLocks == 0 && waitingForWriteLock.size() > 0) { writeLockedThread = (Thread) waitingForWriteLock.get(0); // if ( log.isDebugEnabled() ) --- 200,204 ---- // could pull out of sub if block to get nested tracking working. ! if (outstandingReadLocks == 0 && waitingForWriteLock != null && waitingForWriteLock.size() > 0) { writeLockedThread = (Thread) waitingForWriteLock.get(0); // if ( log.isDebugEnabled() ) *************** *** 246,250 **** outstandingReadLocks--; if (outstandingReadLocks == 0 && writeLockedThread == null && ! waitingForWriteLock.size() > 0) { writeLockedThread = (Thread) waitingForWriteLock.get(0); // if ( log.isDebugEnabled() ) --- 248,252 ---- outstandingReadLocks--; if (outstandingReadLocks == 0 && writeLockedThread == null && ! waitingForWriteLock != null && waitingForWriteLock.size() > 0) { writeLockedThread = (Thread) waitingForWriteLock.get(0); // if ( log.isDebugEnabled() ) *************** *** 270,274 **** public synchronized boolean isLockedForWrite() { ! return writeLockedThread != null || waitingForWriteLock.size() > 0; } } --- 272,276 ---- public synchronized boolean isLockedForWrite() { ! 
return writeLockedThread != null || (waitingForWriteLock != null && waitingForWriteLock.size() > 0); } } Index: ReentrantReadWriteLock.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/util/ReentrantReadWriteLock.java,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** ReentrantReadWriteLock.java 2 Jun 2004 11:34:34 -0000 1.4 --- ReentrantReadWriteLock.java 8 Jun 2004 08:16:14 -0000 1.5 *************** *** 28,32 **** protected long holds_ = 0; protected int mode_ = Lock.READ_LOCK; ! private long timeOut_ = 60000L; public ReentrantReadWriteLock(String id) { --- 28,32 ---- protected long holds_ = 0; protected int mode_ = Lock.READ_LOCK; ! private long timeOut_ = 240000L; public ReentrantReadWriteLock(String id) { |
From: Wolfgang M. M. <wol...@us...> - 2004-06-08 08:16:22
Update of /cvsroot/exist/eXist-1.0/src/org/exist In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29485/src/org/exist Modified Files: Indexer.java Log Message: * Resolved concurrency conflicts when accessing a collection: avoid dirty reads on the old document while it is being replaced. * The index settings specified in the configuration are now also applied to reindexed document fragments during an XUpdate. * Removed unnecessary data from org.exist.dom.DocumentImpl to reduce its in memory size. * Deferred addition of new documents to collection: when adding a large number of documents, keeping the actual document objects in memory slows down the indexing process. Class Collection now just remembers the document name, not the object itself. Index: Indexer.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/Indexer.java,v retrieving revision 1.9 retrieving revision 1.10 diff -C2 -d -r1.9 -r1.10 *** Indexer.java 28 May 2004 10:54:17 -0000 1.9 --- Indexer.java 8 Jun 2004 08:16:13 -0000 1.10 *************** *** 33,36 **** --- 33,37 ---- import org.apache.log4j.Category; + import org.apache.log4j.Logger; import org.exist.dom.AttrImpl; import org.exist.dom.CommentImpl; *************** *** 70,75 **** implements ContentHandler, LexicalHandler, ErrorHandler { ! private final static Category LOG = ! Category.getInstance(Indexer.class.getName()); protected DBBroker broker = null; --- 71,76 ---- implements ContentHandler, LexicalHandler, ErrorHandler { ! private final static Logger LOG = ! Logger.getLogger(Indexer.class); protected DBBroker broker = null; *************** *** 146,149 **** --- 147,156 ---- } + /** + * Prepare the indexer for parsing a new document. This will + * reset the internal state of the Indexer object. + * + * @param doc + */ public void setDocument(DocumentImpl doc) { document = doc; *************** *** 155,158 **** --- 162,175 ---- rootNode = null; } + + /** + * Set the document object to be used by this Indexer. This + * method doesn't reset the internal state. + * + * @param doc + */ + public void setDocumentObject(DocumentImpl doc) { + document = doc; + } public void characters(char[] ch, int start, int length) { |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:48:06
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30013/src/org/exist/storage/cache Modified Files: LRDCache.java Cacheable.java ClockCache.java GClockCache.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: GClockCache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/GClockCache.java,v retrieving revision 1.10 retrieving revision 1.11 diff -C2 -d -r1.10 -r1.11 *** GClockCache.java 3 May 2004 12:58:11 -0000 1.10 --- GClockCache.java 4 Jun 2004 09:47:24 -0000 1.11 *************** *** 47,53 **** protected Long2ObjectHashMap map; protected int hits = 0, fails = 0; - - protected long syncPeriod = 20000; - protected long lastSync = System.currentTimeMillis(); public GClockCache(int size) { --- 47,50 ---- *************** *** 73,78 **** } else removeOne(item); - if(System.currentTimeMillis() - lastSync > syncPeriod) - flush(); } --- 70,73 ---- *************** *** 105,113 **** public void flush() { for (int i = 0; i < count; i++) { ! if (items[i] != null) ! items[i].sync(); } ! lastSync = System.currentTimeMillis(); } --- 100,109 ---- public void flush() { + int written = 0; for (int i = 0; i < count; i++) { ! if (items[i] != null && items[i].sync()) ! ++written; } ! // LOG.debug(written + " pages written to disk"); } Index: LRDCache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/LRDCache.java,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** LRDCache.java 3 May 2004 12:58:11 -0000 1.4 --- LRDCache.java 4 Jun 2004 09:47:24 -0000 1.5 *************** *** 82,88 **** else if (totalReferences > nextCleanup) ageReferences(); - if(System.currentTimeMillis() - lastSync > syncPeriod) { - flush(); - } } --- 82,85 ---- Index: ClockCache.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/ClockCache.java,v retrieving revision 1.14 retrieving revision 1.15 diff -C2 -d -r1.14 -r1.15 *** ClockCache.java 19 May 2004 12:01:24 -0000 1.14 --- ClockCache.java 4 Jun 2004 09:47:24 -0000 1.15 *************** *** 45,51 **** protected int hits = 0, fails = 0; - private long lastSync = System.currentTimeMillis(); - private long syncPeriod = 30000; - public ClockCache(int size) { this.size = size; --- 45,48 ---- *************** *** 76,81 **** removeOne(item); } - //if(System.currentTimeMillis() - lastSync > syncPeriod) - // flush(); } --- 73,76 ---- *************** *** 103,106 **** --- 98,102 ---- old.sync(); } + // System.out.println(old.getKey() + " -> " + item.getKey()); items[bucket] = item; map.put(item.getKey(), item); *************** *** 148,155 **** */ public void flush() { ! for(int i = 0; i < count; i++) ! if(items[i] != null) ! items[i].sync(); ! lastSync = System.currentTimeMillis(); } --- 144,153 ---- */ public void flush() { ! int written = 0; ! for(int i = 0; i < count; i++) { ! if(items[i] != null && items[i].sync()) ! ++written; ! } ! 
// LOG.debug(written + " pages written to disk"); } Index: Cacheable.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/cache/Cacheable.java,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** Cacheable.java 2 Feb 2004 15:30:35 -0000 1.3 --- Cacheable.java 4 Jun 2004 09:47:24 -0000 1.4 *************** *** 96,100 **** * to disk. */ ! public void sync(); /** --- 96,100 ---- * to disk. */ ! public boolean sync(); /** |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:47:37
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30013/src/org/exist/storage Modified Files: NativeBroker.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: NativeBroker.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/NativeBroker.java,v retrieving revision 1.78 retrieving revision 1.79 diff -C2 -d -r1.78 -r1.79 *** NativeBroker.java 3 Jun 2004 07:30:29 -0000 1.78 --- NativeBroker.java 4 Jun 2004 09:47:26 -0000 1.79 *************** *** 184,188 **** "elements index buffer size: " + indexBuffers + "; " + dataBuffers); elementsDb = ! new BFile( new File(dataDir + pathSep + "elements.dbx"), indexBuffers, --- 184,188 ---- "elements index buffer size: " + indexBuffers + "; " + dataBuffers); elementsDb = ! new BFile(pool, new File(dataDir + pathSep + "elements.dbx"), indexBuffers, *************** *** 208,215 **** LOG.debug("page buffer size = " + indexBuffers + "; " + dataBuffers); domDb = ! new DOMFile( ! new File(dataDir + pathSep + "dom.dbx"), ! indexBuffers, ! dataBuffers); if (!domDb.exists()) { LOG.info("creating dom.dbx"); --- 208,213 ---- LOG.debug("page buffer size = " + indexBuffers + "; " + dataBuffers); domDb = ! new DOMFile(pool, new File(dataDir + pathSep + "dom.dbx"), ! indexBuffers, dataBuffers); if (!domDb.exists()) { LOG.info("creating dom.dbx"); *************** *** 239,243 **** + dataBuffers); collectionsDb = ! new CollectionStore( new File(dataDir + pathSep + "collections.dbx"), indexBuffers, --- 237,241 ---- + dataBuffers); collectionsDb = ! new CollectionStore(pool, new File(dataDir + pathSep + "collections.dbx"), indexBuffers, |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:47:37
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage/store In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30013/src/org/exist/storage/store Modified Files: CollectionStore.java DOMFile.java BFile.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: DOMFile.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/store/DOMFile.java,v retrieving revision 1.29 retrieving revision 1.30 diff -C2 -d -r1.29 -r1.30 *** DOMFile.java 25 May 2004 09:26:10 -0000 1.29 --- DOMFile.java 4 Jun 2004 09:47:25 -0000 1.30 *************** *** 41,44 **** --- 41,45 ---- import org.exist.dom.NodeProxy; import org.exist.dom.XMLUtil; + import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; import org.exist.storage.NativeBroker; *************** *** 49,52 **** --- 50,54 ---- import org.exist.util.ByteConversion; import org.exist.util.Lock; + import org.exist.util.LockException; import org.exist.util.Lockable; import org.exist.util.ReadOnlyException; *************** *** 101,109 **** // page types public final static byte LOB = 21; - public final static byte RECORD = 20; ! protected final static short OVERFLOW = 0; ! private final Cache dataCache; --- 103,111 ---- // page types public final static byte LOB = 21; public final static byte RECORD = 20; + public final static short OVERFLOW = 0; ! public final static long DATA_SYNC_PERIOD = 1000; ! private final Cache dataCache; *************** *** 119,124 **** private DocumentImpl currentDocument = null; ! public DOMFile(int buffers, int dataBuffers) { ! super(buffers); lock = new ReentrantReadWriteLock("dom.dbx"); fileHeader = (DOMFileHeader) getFileHeader(); --- 121,126 ---- private DocumentImpl currentDocument = null; ! public DOMFile(BrokerPool pool, int buffers, int dataBuffers) { ! super(pool, buffers); lock = new ReentrantReadWriteLock("dom.dbx"); fileHeader = (DOMFileHeader) getFileHeader(); *************** *** 127,144 **** dataCache = new ClockCache(dataBuffers); dataCache.setFileName("dom.dbx"); } ! public DOMFile(File file) { ! this(256, 256); ! setFile(file); ! } ! ! public DOMFile(File file, int buffers) { ! this(buffers, 256); ! setFile(file); ! } ! ! public DOMFile(File file, int buffers, int dataBuffers) { ! this(buffers, dataBuffers); setFile(file); } --- 129,151 ---- dataCache = new ClockCache(dataBuffers); dataCache.setFileName("dom.dbx"); + + Runnable syncAction = new Runnable() { + public void run() { + try { + // LOG.debug("Triggering cache sync"); + lock.acquire(Lock.WRITE_LOCK); + dataCache.flush(); + } catch (LockException e) { + LOG.warn("Failed to acquire lock on dom.dbx"); + } finally { + lock.release(); + } + } + }; + pool.getSyncDaemon().executePeriodically(DATA_SYNC_PERIOD, syncAction, false); } ! public DOMFile(BrokerPool pool, File file, int buffers, int dataBuffers) { ! this(pool, buffers, dataBuffers); setFile(file); } *************** *** 624,628 **** public boolean create() throws DBException { ! if (super.create((short) 12)) return true; else --- 631,635 ---- public boolean create() throws DBException { ! 
if (super.create((short) 12, lock)) return true; else *************** *** 958,962 **** */ public boolean open() throws DBException { ! if (super.open(FILE_FORMAT_VERSION_ID)) return true; else --- 965,969 ---- */ public boolean open() throws DBException { ! if (super.open(FILE_FORMAT_VERSION_ID, lock)) return true; else *************** *** 1705,1715 **** } ! /* ! * (non-Javadoc) ! * ! * @see org.exist.storage.cache.Cacheable#release() ! */ ! public void sync() { ! if (isDirty()) write(); } --- 1712,1721 ---- } ! public boolean sync() { ! if (isDirty()) { ! write(); ! return true; ! } ! return false; } Index: BFile.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/store/BFile.java,v retrieving revision 1.24 retrieving revision 1.25 diff -C2 -d -r1.24 -r1.25 *** BFile.java 2 Jun 2004 11:34:36 -0000 1.24 --- BFile.java 4 Jun 2004 09:47:25 -0000 1.25 *************** *** 37,40 **** --- 37,41 ---- import org.dbxml.core.filer.BTreeException; import org.dbxml.core.indexer.IndexQuery; + import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; import org.exist.storage.cache.Cache; *************** *** 80,83 **** --- 81,86 ---- public final static short FILE_FORMAT_VERSION_ID = 3; + public final static long DATA_SYNC_PERIOD = 15000; + // minimum free space a page should have to be // considered for reusing *************** *** 103,132 **** public int fixedKeyLen = -1; ! public BFile() { ! super(); ! fileHeader = (BFileHeader) getFileHeader(); ! minFree = PAGE_MIN_FREE; ! } ! ! public BFile(File file) { ! super(file); ! fileHeader = (BFileHeader) getFileHeader(); ! dataCache = new LRDCache(256); ! dataCache.setFileName(getFile().getName()); ! minFree = PAGE_MIN_FREE; ! lock = new ReentrantReadWriteLock(file.getName()); ! } ! ! public BFile(File file, int buffers) { ! super(file, buffers); ! fileHeader = (BFileHeader) getFileHeader(); ! dataCache = new LRDCache(buffers); ! dataCache.setFileName(getFile().getName()); ! minFree = PAGE_MIN_FREE; ! lock = new ReentrantReadWriteLock(file.getName()); ! } ! ! public BFile(File file, int btreeBuffers, int dataBuffers) { ! super(file, btreeBuffers); fileHeader = (BFileHeader) getFileHeader(); dataCache = new LRDCache(dataBuffers); --- 106,111 ---- public int fixedKeyLen = -1; ! public BFile(BrokerPool pool, File file, int btreeBuffers, int dataBuffers) { ! super(pool, file, btreeBuffers); fileHeader = (BFileHeader) getFileHeader(); dataCache = new LRDCache(dataBuffers); *************** *** 134,137 **** --- 113,131 ---- minFree = PAGE_MIN_FREE; lock = new ReentrantReadWriteLock(file.getName()); + + Runnable syncAction = new Runnable() { + public void run() { + try { + // LOG.debug("Triggering cache sync for " + getFile().getName()); + lock.acquire(Lock.WRITE_LOCK); + dataCache.flush(); + } catch (LockException e) { + LOG.warn("Failed to acquire lock on dom.dbx"); + } finally { + lock.release(); + } + } + }; + pool.getSyncDaemon().executePeriodically(getDataSyncPeriod(), syncAction, false); } *************** *** 152,155 **** --- 146,153 ---- } + protected long getDataSyncPeriod() { + return DATA_SYNC_PERIOD; + } + /** * Append the given data fragment to the value associated *************** *** 239,243 **** public boolean create() throws DBException { ! if (super.create((short) fixedKeyLen)) { fileHeader.setLastDataPage(-1); return true; --- 237,241 ---- public boolean create() throws DBException { ! 
if (super.create((short) fixedKeyLen, lock)) { fileHeader.setLastDataPage(-1); return true; *************** *** 540,544 **** public boolean open() throws DBException { ! return super.open(FILE_FORMAT_VERSION_ID); } --- 538,542 ---- public boolean open() throws DBException { ! return super.open(FILE_FORMAT_VERSION_ID, lock); } *************** *** 1189,1196 **** * @see org.exist.storage.cache.Cacheable#release() */ ! public void sync() { if (isDirty()) { try { write(); } catch (IOException e) { LOG.error("IO exception occurred while saving page " --- 1187,1195 ---- * @see org.exist.storage.cache.Cacheable#release() */ ! public boolean sync() { if (isDirty()) { try { write(); + return true; } catch (IOException e) { LOG.error("IO exception occurred while saving page " *************** *** 1198,1201 **** --- 1197,1201 ---- } } + return false; } Index: CollectionStore.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/store/CollectionStore.java,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** CollectionStore.java 3 May 2004 12:58:12 -0000 1.4 --- CollectionStore.java 4 Jun 2004 09:47:25 -0000 1.5 *************** *** 9,12 **** --- 9,13 ---- import org.exist.collections.CollectionCache; + import org.exist.storage.BrokerPool; public class CollectionStore extends BFile { *************** *** 16,41 **** private CollectionCache collectionsCache = new CollectionCache(COLLECTION_BUFFER_SIZE); - - /** - * - */ - public CollectionStore() { - super(); - } - - /** - * @param file - */ - public CollectionStore(File file) { - super(file); - } - - /** - * @param file - * @param buffers - */ - public CollectionStore(File file, int buffers) { - super(file, buffers); - } /** --- 17,20 ---- *************** *** 44,49 **** * @param dataBuffers */ ! public CollectionStore(File file, int btreeBuffers, int dataBuffers) { ! super(file, btreeBuffers, dataBuffers); } --- 23,28 ---- * @param dataBuffers */ ! public CollectionStore(BrokerPool pool, File file, int btreeBuffers, int dataBuffers) { ! super(pool, file, btreeBuffers, dataBuffers); } *************** *** 51,53 **** --- 30,48 ---- return collectionsCache; } + + + /* (non-Javadoc) + * @see org.dbxml.core.filer.BTree#getBTreeSyncPeriod() + */ + protected long getBTreeSyncPeriod() { + return 1000; + } + + + /* (non-Javadoc) + * @see org.exist.storage.store.BFile#getDataSyncPeriod() + */ + protected long getDataSyncPeriod() { + return 1000; + } } |
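The pattern repeated in DOMFile and BFile above is: build a Runnable that acquires the file's write lock, flushes the data cache, and releases the lock in a finally block, then register it with the background SyncDaemon using a per-file period. A rough equivalent using only standard Java classes in place of eXist's SyncDaemon and Lock (the class name, field names, and periods shown are illustrative):

    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class PeriodicFlushExample {
        private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
        private final Runnable flushDataCache;   // e.g. the flush() call of the cache sketched earlier

        PeriodicFlushExample(Runnable flushDataCache) {
            this.flushDataCache = flushDataCache;
        }

        void schedule(ScheduledExecutorService daemon, long periodMillis) {
            Runnable syncAction = () -> {
                lock.writeLock().lock();          // hold off readers and writers while pages go to disk
                try {
                    flushDataCache.run();
                } finally {
                    lock.writeLock().unlock();    // always release, even if the flush throws
                }
            };
            // Files that cannot be rebuilt (dom.dbx, collections.dbx) would use a short period
            // (e.g. 1000 ms); reconstructible index files a longer one (e.g. 15000 ms).
            daemon.scheduleAtFixedRate(syncAction, periodMillis, periodMillis, TimeUnit.MILLISECONDS);
        }
    }

The write lock is what makes the background flush safe: one flush pass briefly excludes all other access to the same file, instead of letting the cache write pages in the middle of an unrelated operation.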
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:47:37
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage/sync In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30013/src/org/exist/storage/sync Modified Files: SyncDaemon.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: SyncDaemon.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/sync/SyncDaemon.java,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** SyncDaemon.java 9 Sep 2003 07:29:04 -0000 1.3 --- SyncDaemon.java 4 Jun 2004 09:47:26 -0000 1.4 *************** *** 246,250 **** if (period <= 0) return null; ! LOG.debug("adding command for periodic execution"); long firstTime = System.currentTimeMillis(); if (!startNow) --- 246,250 ---- if (period <= 0) return null; ! // LOG.debug("adding command for periodic execution"); long firstTime = System.currentTimeMillis(); if (!startNow) |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:45:37
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29682/src/org/exist/storage Modified Files: NativeTextEngine.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: NativeTextEngine.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/NativeTextEngine.java,v retrieving revision 1.54 retrieving revision 1.55 diff -C2 -d -r1.54 -r1.55 *** NativeTextEngine.java 3 Jun 2004 10:32:32 -0000 1.54 --- NativeTextEngine.java 4 Jun 2004 09:45:27 -0000 1.55 *************** *** 140,144 **** try { if ((dbWords = (BFile) config.getProperty("db-connection.words")) == null) { ! dbWords = new BFile(new File(dataDir + pathSep + "words.dbx"), indexBuffers, dataBuffers); if (!dbWords.exists()) --- 140,144 ---- try { if ((dbWords = (BFile) config.getProperty("db-connection.words")) == null) { ! dbWords = new BFile(broker.getBrokerPool(), new File(dataDir + pathSep + "words.dbx"), indexBuffers, dataBuffers); if (!dbWords.exists()) |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:44:57
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29447/src/org/exist/storage Modified Files: BrokerPool.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: BrokerPool.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/BrokerPool.java,v retrieving revision 1.22 retrieving revision 1.23 diff -C2 -d -r1.22 -r1.23 *** BrokerPool.java 25 May 2004 09:26:11 -0000 1.22 --- BrokerPool.java 4 Jun 2004 09:44:34 -0000 1.23 *************** *** 302,305 **** --- 302,309 ---- } + public SyncDaemon getSyncDaemon() { + return syncDaemon; + } + /** * Initialize the current instance. |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:43:02
Update of /cvsroot/exist/eXist-1.0/src/org/exist/collections In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29205/src/org/exist/collections Modified Files: Collection.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: Collection.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/collections/Collection.java,v retrieving revision 1.26 retrieving revision 1.27 diff -C2 -d -r1.26 -r1.27 *** Collection.java 2 Jun 2004 11:34:35 -0000 1.26 --- Collection.java 4 Jun 2004 09:42:45 -0000 1.27 *************** *** 1508,1512 **** * @see org.exist.storage.cache.Cacheable#release() */ ! public void sync() { } } --- 1508,1513 ---- * @see org.exist.storage.cache.Cacheable#release() */ ! public boolean sync() { ! return false; } } |
From: Wolfgang M. M. <wol...@us...> - 2004-06-04 09:42:28
Update of /cvsroot/exist/eXist-1.0/src/org/dbxml/core/filer In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29160/src/org/dbxml/core/filer Modified Files: BTree.java Log Message: Improved the periodic flushing of cache contents: this is now handled by a background thread (SyncDaemon) instead of the cache object itself. Different settings are possible for different files. The vital files, dom.dbx and collections.dbx, are flushed very often (every second). The other files can be reconstructed and are thus written less frequently. Index: BTree.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/dbxml/core/filer/BTree.java,v retrieving revision 1.25 retrieving revision 1.26 diff -C2 -d -r1.25 -r1.26 *** BTree.java 17 May 2004 09:59:43 -0000 1.25 --- BTree.java 4 Jun 2004 09:42:19 -0000 1.26 *************** *** 61,64 **** --- 61,65 ---- import org.dbxml.core.data.Value; import org.dbxml.core.indexer.IndexQuery; + import org.exist.storage.BrokerPool; import org.exist.storage.BufferStats; import org.exist.storage.cache.Cache; *************** *** 67,70 **** --- 68,73 ---- import org.exist.storage.io.VariableByteArrayInput; import org.exist.util.ByteConversion; + import org.exist.util.Lock; + import org.exist.util.LockException; import org.exist.xquery.TerminatedException; *************** *** 91,94 **** --- 94,100 ---- public class BTree extends Paged { + + public final static long BTREE_SYNC_PERIOD = 15000; + protected final static byte LEAF = 1; protected final static byte BRANCH = 2; *************** *** 99,111 **** private BTreeFileHeader fileHeader; protected int buffers = 1024; protected Cache cache; ! public BTree() { ! this(256); ! } ! ! public BTree(int buffers) { super(); this.buffers = buffers; fileHeader = (BTreeFileHeader) getFileHeader(); --- 105,116 ---- private BTreeFileHeader fileHeader; + protected BrokerPool pool; + protected int buffers = 1024; protected Cache cache; ! public BTree(BrokerPool pool, int buffers) { super(); + this.pool = pool; this.buffers = buffers; fileHeader = (BTreeFileHeader) getFileHeader(); *************** *** 114,132 **** } ! public BTree(File file) { ! this(); ! setFile(file); ! } ! ! public BTree(File file, int buffers) { ! this(buffers); setFile(file); } ! public boolean open(short expectedVersion) throws DBException { if (super.open(expectedVersion)) { ! cache = new LRDCache(buffers); ! cache.setFileName(getFile().getName()); ! //rootNode = getBTreeNode( fileHeader.getRootPage(), null ); return true; } else --- 119,130 ---- } ! public BTree(BrokerPool pool, File file, int buffers) { ! this(pool, buffers); setFile(file); } ! public boolean open(short expectedVersion, Lock lock) throws DBException { if (super.open(expectedVersion)) { ! initCache(lock); return true; } else *************** *** 134,146 **** } ! public boolean create() throws DBException { ! return create((short) - 1); } ! public boolean create(short fixedKeyLen) throws DBException { if (super.create()) try { ! cache = new LRDCache(buffers); ! cache.setFileName(getFile().getName()); createRootNode(); fileHeader.setFixedKeyLen(fixedKeyLen); --- 132,143 ---- } ! public boolean create(Lock lock) throws DBException { ! return create((short) - 1, lock); } ! public boolean create(short fixedKeyLen, Lock lock) throws DBException { if (super.create()) try { ! 
initCache(lock); createRootNode(); fileHeader.setFixedKeyLen(fixedKeyLen); *************** *** 153,156 **** --- 150,177 ---- } + private void initCache(final Lock lock) { + cache = new LRDCache(buffers); + cache.setFileName(getFile().getName()); + + Runnable syncAction = new Runnable() { + public void run() { + try { + // LOG.debug("Triggering cache sync for " + getFile().getName()); + lock.acquire(Lock.WRITE_LOCK); + cache.flush(); + } catch (LockException e) { + LOG.warn("Failed to acquire lock on dom.dbx"); + } finally { + lock.release(); + } + } + }; + pool.getSyncDaemon().executePeriodically(getBTreeSyncPeriod(), syncAction, false); + } + + protected long getBTreeSyncPeriod() { + return BTREE_SYNC_PERIOD; + } + public short getFixedKeyLen() { BTreeFileHeader fileHeader = (BTreeFileHeader) getFileHeader(); *************** *** 375,385 **** } ! public void sync() { if(isDirty()) try { write(); } catch (IOException e) { LOG.warn("IO error while writing page: " + page.getPageNum(), e); } } --- 396,408 ---- } ! public boolean sync() { if(isDirty()) try { write(); + return true; } catch (IOException e) { LOG.warn("IO error while writing page: " + page.getPageNum(), e); } + return false; } |
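CollectionStore earlier in this batch shows why the sync period is exposed through protected getters (getBTreeSyncPeriod, getDataSyncPeriod) rather than a constant: every file inherits the relaxed default, and only the files that cannot be rebuilt override it with a tighter period. Stripped to the bare mechanism, with class names invented for the example:

    // Base store: flushed every 15 seconds unless a subclass says otherwise.
    abstract class StoreFile {
        static final long DEFAULT_SYNC_PERIOD = 15000;

        protected long getSyncPeriod() {
            return DEFAULT_SYNC_PERIOD;
        }
    }

    // A store like collections.dbx cannot be reconstructed from other files,
    // so it asks the daemon to flush it every second.
    class VitalStoreFile extends StoreFile {
        @Override
        protected long getSyncPeriod() {
            return 1000;
        }
    }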
From: Wolfgang M. M. <wol...@us...> - 2004-06-03 10:32:40
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30875/src/org/exist/storage Modified Files: NativeTextEngine.java Log Message: * Fixed previous-sibling/following-sibling axis support. * Reverse axis steps now number the returned nodes in reverse document order. Index: NativeTextEngine.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/NativeTextEngine.java,v retrieving revision 1.53 retrieving revision 1.54 diff -C2 -d -r1.53 -r1.54 *** NativeTextEngine.java 2 Jun 2004 11:34:35 -0000 1.53 --- NativeTextEngine.java 3 Jun 2004 10:32:32 -0000 1.54 *************** *** 438,442 **** collectionId = collection.getId(); if (startTerm != null && startTerm.length() > 0) ! ref = new WordRef(collectionId, startTerm.toString()); else ref = new WordRef(collectionId); --- 438,442 ---- collectionId = collection.getId(); if (startTerm != null && startTerm.length() > 0) ! ref = new WordRef(collectionId, startTerm.toString().toLowerCase()); else ref = new WordRef(collectionId); |
From: Wolfgang M. M. <wol...@us...> - 2004-06-03 10:32:00
Update of /cvsroot/exist/eXist-1.0/src/org/exist/xquery In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30700/src/org/exist/xquery Modified Files: FilteredExpression.java LocationStep.java Predicate.java Constants.java Log Message: * Fixed previous-sibling/following-sibling axis support. * Reverse axis steps now number the returned nodes in reverse document order. Index: Predicate.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/Predicate.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** Predicate.java 28 May 2004 10:54:13 -0000 1.2 --- Predicate.java 3 Jun 2004 10:31:36 -0000 1.3 *************** *** 67,71 **** public Sequence evalPredicate( Sequence outerSequence, ! Sequence contextSequence) throws XPathException { setInPredicate(true); --- 67,72 ---- public Sequence evalPredicate( Sequence outerSequence, ! Sequence contextSequence, ! int mode) throws XPathException { setInPredicate(true); *************** *** 138,151 **** NodeSet contextSet = contextSequence.toNodeSet(); for(SequenceIterator i = outerSequence.iterate(); i.hasNext(); ) { ! Item item = i.nextItem(); ! NodeProxy p = (NodeProxy)item; ! Sequence temp = contextSet.selectAncestorDescendant(p, NodeSet.DESCENDANT); ! Sequence innerSeq = inner.eval(contextSequence); ! for(SequenceIterator j = innerSeq.iterate(); j.hasNext(); ) { ! NumericValue v = (NumericValue)j.nextItem().convertTo(Type.NUMBER); ! int pos = v.getInt() - 1; ! if(pos < temp.getLength() && pos > -1) ! result.add(temp.itemAt(pos)); ! } } return result; --- 139,175 ---- NodeSet contextSet = contextSequence.toNodeSet(); for(SequenceIterator i = outerSequence.iterate(); i.hasNext(); ) { ! Item item = i.nextItem(); ! NodeProxy p = (NodeProxy)item; ! Sequence temp; ! switch(mode) { ! case Constants.FOLLOWING_SIBLING_AXIS: ! temp = contextSet.selectSiblings(p, NodeSet.FOLLOWING); ! break; ! case Constants.PRECEDING_SIBLING_AXIS: ! temp = contextSet.selectSiblings(p, NodeSet.PRECEDING); ! break; ! case Constants.PARENT_AXIS: ! temp = p.getParents(); ! break; ! case Constants.ANCESTOR_AXIS: ! case Constants.ANCESTOR_SELF_AXIS: ! temp = contextSet.selectAncestors(p, false, false); ! break; ! case Constants.SELF_AXIS: ! temp = p; ! break; ! default: ! temp = contextSet.selectAncestorDescendant(p, NodeSet.DESCENDANT); ! break; ! } ! boolean reverseAxis = isReverseAxis(mode); ! Sequence innerSeq = inner.eval(contextSequence); ! for(SequenceIterator j = innerSeq.iterate(); j.hasNext(); ) { ! Item next = j.nextItem(); ! NumericValue v = (NumericValue)next.convertTo(Type.NUMBER); ! int pos = (reverseAxis ? temp.getLength() - v.getInt() : v.getInt() - 1); ! if(pos < temp.getLength() && pos > -1) ! result.add(temp.itemAt(pos)); ! } } return result; *************** *** 166,168 **** --- 190,195 ---- } + public final static boolean isReverseAxis(int axis) { + return axis < Constants.CHILD_AXIS; + } } Index: LocationStep.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/LocationStep.java,v retrieving revision 1.11 retrieving revision 1.12 diff -C2 -d -r1.11 -r1.12 *** LocationStep.java 2 Jun 2004 12:12:53 -0000 1.11 --- LocationStep.java 3 Jun 2004 10:31:36 -0000 1.12 *************** *** 96,100 **** for (Iterator i = predicates.iterator(); i.hasNext();) { pred = (Predicate) i.next(); ! 
result = pred.evalPredicate(outerSequence, result); } return result; --- 96,100 ---- for (Iterator i = predicates.iterator(); i.hasNext();) { pred = (Predicate) i.next(); ! result = pred.evalPredicate(outerSequence, result, axis); } return result; *************** *** 300,305 **** test.getName(), null); } ! result = contextSet.selectSiblings( ! currentSet, axis == Constants.PRECEDING_SIBLING_AXIS ? NodeSet.PRECEDING --- 300,305 ---- test.getName(), null); } ! result = currentSet.selectSiblings( ! contextSet, axis == Constants.PRECEDING_SIBLING_AXIS ? NodeSet.PRECEDING *************** *** 367,372 **** } result = ! contextSet.selectAncestors( ! currentSet, axis == Constants.ANCESTOR_SELF_AXIS, inPredicate); --- 367,372 ---- } result = ! currentSet.selectAncestors( ! contextSet, axis == Constants.ANCESTOR_SELF_AXIS, inPredicate); *************** *** 390,393 **** --- 390,396 ---- } + /** + * TODO: works only for .. + */ protected NodeSet getParents( XQueryContext context, Index: FilteredExpression.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/FilteredExpression.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** FilteredExpression.java 28 May 2004 10:54:12 -0000 1.2 --- FilteredExpression.java 3 Jun 2004 10:31:36 -0000 1.3 *************** *** 69,73 **** for (Iterator i = predicates.iterator(); i.hasNext();) { pred = (Predicate) i.next(); ! result = pred.evalPredicate(contextSequence, result); } return result; --- 69,73 ---- for (Iterator i = predicates.iterator(); i.hasNext();) { pred = (Predicate) i.next(); ! result = pred.evalPredicate(contextSequence, result, Constants.DESCENDANT_SELF_AXIS); } return result; Index: Constants.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/Constants.java,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** Constants.java 29 Jan 2004 15:06:42 -0000 1.1 --- Constants.java 3 Jun 2004 10:31:36 -0000 1.2 *************** *** 1,5 **** package org.exist.xquery; - public interface Constants { --- 1,26 ---- + /* + * eXist Open Source Native XML Database + * Copyright (C) 2001-03 Wolfgang M. Meier + * wol...@ex... + * http://exist.sourceforge.net + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. + * + * $Id$ + */ package org.exist.xquery; public interface Constants { *************** *** 20,38 **** "attribute-descendant" }; ! public final static int ANCESTOR_AXIS = 0; public final static int ANCESTOR_SELF_AXIS = 1; ! public final static int ATTRIBUTE_AXIS = 2; ! public final static int CHILD_AXIS = 3; ! public final static int DESCENDANT_AXIS = 4; ! public final static int DESCENDANT_SELF_AXIS = 5; ! public final static int FOLLOWING_AXIS = 6; ! 
public final static int FOLLOWING_SIBLING_AXIS = 7; ! public final static int NAMESPACE_AXIS = 8; ! public final static int PARENT_AXIS = 9; ! public final static int PRECEDING_AXIS = 10; ! public final static int PRECEDING_SIBLING_AXIS = 11; ! public final static int SELF_AXIS = 12; ! public final static int DESCENDANT_ATTRIBUTE_AXIS = 13; /** --- 41,62 ---- "attribute-descendant" }; ! ! // Reverse axes public final static int ANCESTOR_AXIS = 0; public final static int ANCESTOR_SELF_AXIS = 1; ! public final static int PARENT_AXIS = 2; ! public final static int PRECEDING_AXIS = 3; ! public final static int PRECEDING_SIBLING_AXIS = 4; ! ! // Forward axes ! public final static int CHILD_AXIS = 10; ! public final static int ATTRIBUTE_AXIS = 11; ! public final static int DESCENDANT_AXIS = 12; ! public final static int DESCENDANT_SELF_AXIS = 13; ! public final static int FOLLOWING_AXIS = 14; ! public final static int FOLLOWING_SIBLING_AXIS = 15; ! public final static int NAMESPACE_AXIS = 16; ! public final static int SELF_AXIS = 17; ! public final static int DESCENDANT_ATTRIBUTE_AXIS = 18; /** *************** *** 104,107 **** --- 128,133 ---- public final static int FULLTEXT_OR = 0; public final static int FULLTEXT_AND = 1; + + } |
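The positional logic added to Predicate.evalPredicate is easier to follow in isolation: the candidate nodes are kept in document order, so position p resolves to index p - 1 on a forward axis but to length - p on a reverse axis, and isReverseAxis works only because the new Constants give reverse axes the low numbers below CHILD_AXIS. A standalone sketch (not the eXist classes) with a tiny worked example:

    import java.util.Arrays;
    import java.util.List;

    class ReverseAxisPositions {
        // Mirrors the renumbered Constants: reverse axes get ids below CHILD_AXIS.
        static final int PRECEDING_SIBLING_AXIS = 4;
        static final int CHILD_AXIS = 10;

        static boolean isReverseAxis(int axis) {
            return axis < CHILD_AXIS;
        }

        // candidates are in document order; position is the 1-based XPath position.
        static <T> T select(List<T> candidates, int position, int axis) {
            int idx = isReverseAxis(axis) ? candidates.size() - position : position - 1;
            if (idx < 0 || idx >= candidates.size()) {
                return null;   // position out of range -> empty result
            }
            return candidates.get(idx);
        }

        public static void main(String[] args) {
            List<String> siblings = Arrays.asList("a", "b", "c");      // document order
            // preceding-sibling::*[1] is the nearest preceding sibling, i.e. the last in document order:
            System.out.println(select(siblings, 1, PRECEDING_SIBLING_AXIS));  // -> "c"
            // child::*[1] is simply the first child in document order:
            System.out.println(select(siblings, 1, CHILD_AXIS));              // -> "a"
        }
    }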
From: Wolfgang M. M. <wol...@us...> - 2004-06-03 10:31:56
Update of /cvsroot/exist/eXist-1.0/src/org/exist/dom In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30700/src/org/exist/dom Modified Files: AbstractNodeSet.java DocumentImpl.java ExtArrayNodeSet.java NodeProxy.java NodeSet.java Log Message: * Fixed previous-sibling/following-sibling axis support. * Reverse axis steps now number the returned nodes in reverse document order. Index: AbstractNodeSet.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/AbstractNodeSet.java,v retrieving revision 1.17 retrieving revision 1.18 diff -C2 -d -r1.17 -r1.18 *** AbstractNodeSet.java 2 Jun 2004 12:12:53 -0000 1.17 --- AbstractNodeSet.java 3 Jun 2004 10:31:37 -0000 1.18 *************** *** 435,439 **** */ public NodeSet selectAncestors( ! NodeSet al, boolean includeSelf, boolean rememberContext) { --- 435,439 ---- */ public NodeSet selectAncestors( ! NodeSet dl, boolean includeSelf, boolean rememberContext) { *************** *** 441,447 **** NodeSet result = new ExtArrayNodeSet(); NodeSet ancestors; ! for (Iterator i = iterator(); i.hasNext();) { n = (NodeProxy) i.next(); ! ancestors = al.ancestorsForChild(n.doc, n.gid, false, includeSelf, -1); for(Iterator j = ancestors.iterator(); j.hasNext(); ) { p = (NodeProxy) j.next(); --- 441,447 ---- NodeSet result = new ExtArrayNodeSet(); NodeSet ancestors; ! for (Iterator i = dl.iterator(); i.hasNext();) { n = (NodeProxy) i.next(); ! ancestors = ancestorsForChild(n.doc, n.gid, false, includeSelf, -1); for(Iterator j = ancestors.iterator(); j.hasNext(); ) { p = (NodeProxy) j.next(); *************** *** 509,514 **** return EMPTY_SET; NodeSet result = new ExtArrayNodeSet(); ! Iterator ia = iterator(); ! Iterator ib = siblings.iterator(); NodeProxy na = (NodeProxy) ia.next(), nb = (NodeProxy) ib.next(); long pa, pb; --- 509,514 ---- return EMPTY_SET; NodeSet result = new ExtArrayNodeSet(); ! Iterator ia = siblings.iterator(); ! Iterator ib = iterator(); NodeProxy na = (NodeProxy) ia.next(), nb = (NodeProxy) ib.next(); long pa, pb; *************** *** 941,943 **** --- 941,957 ---- return false; } + + public String pprint() { + StringBuffer buf = new StringBuffer(); + buf.append('['); + buf.append(getClass().getName()); + buf.append(' '); + for(Iterator i = iterator(); i.hasNext(); ) { + NodeProxy p = (NodeProxy) i.next(); + buf.append(p.pprint()); + buf.append(' '); + } + buf.append(']'); + return buf.toString(); + } } Index: DocumentImpl.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/DocumentImpl.java,v retrieving revision 1.39 retrieving revision 1.40 diff -C2 -d -r1.39 -r1.40 *** DocumentImpl.java 2 Jun 2004 11:34:35 -0000 1.39 --- DocumentImpl.java 3 Jun 2004 10:31:37 -0000 1.40 *************** *** 249,253 **** } ! public int compareTo(Object other) { final long otherId = ((DocumentImpl)other).docId; if (otherId == docId) --- 249,253 ---- } ! 
public final int compareTo(Object other) { final long otherId = ((DocumentImpl)other).docId; if (otherId == docId) Index: NodeProxy.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/NodeProxy.java,v retrieving revision 1.30 retrieving revision 1.31 diff -C2 -d -r1.30 -r1.31 *** NodeProxy.java 28 May 2004 10:54:15 -0000 1.30 --- NodeProxy.java 3 Jun 2004 10:31:37 -0000 1.31 *************** *** 289,292 **** --- 289,296 ---- } + public String pprint() { + return doc.getDocId() + ":" + gid; + } + public static class NodeProxyComparator implements Comparator { Index: ExtArrayNodeSet.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/ExtArrayNodeSet.java,v retrieving revision 1.19 retrieving revision 1.20 diff -C2 -d -r1.19 -r1.20 *** ExtArrayNodeSet.java 28 May 2004 10:54:15 -0000 1.19 --- ExtArrayNodeSet.java 3 Jun 2004 10:31:37 -0000 1.20 *************** *** 22,28 **** import java.util.Iterator; import org.exist.util.FastQSort; import org.exist.util.Range; - import org.exist.util.hashtable.Int2ObjectHashMap; import org.exist.xquery.value.Item; import org.exist.xquery.value.SequenceIterator; --- 22,30 ---- import java.util.Iterator; + import java.util.Map; + import java.util.TreeMap; + import org.exist.util.FastQSort; import org.exist.util.Range; import org.exist.xquery.value.Item; import org.exist.xquery.value.SequenceIterator; *************** *** 52,56 **** public final class ExtArrayNodeSet extends AbstractNodeSet { ! private Int2ObjectHashMap map; private int initalSize = 128; private int size = 0; --- 54,58 ---- public final class ExtArrayNodeSet extends AbstractNodeSet { ! private TreeMap map; private int initalSize = 128; private int size = 0; *************** *** 67,71 **** public ExtArrayNodeSet() { ! this.map = new Int2ObjectHashMap(17); } --- 69,73 ---- public ExtArrayNodeSet() { ! this.map = new TreeMap(); } *************** *** 84,88 **** public ExtArrayNodeSet(int initialDocsCount, int initialArraySize) { this.initalSize = initialArraySize; ! this.map = new Int2ObjectHashMap(initialDocsCount); } --- 86,90 ---- public ExtArrayNodeSet(int initialDocsCount, int initialArraySize) { this.initalSize = initialArraySize; ! this.map = new TreeMap(); } *************** *** 128,135 **** if (doc.docId == lastDoc && lastPart != null) return lastPart; ! Part part = (Part) map.get(doc.docId); if (part == null && create) { part = new Part(sizeHint, doc); ! map.put(doc.docId, part); } lastPart = part; --- 130,137 ---- if (doc.docId == lastDoc && lastPart != null) return lastPart; ! Part part = (Part) map.get(doc); if (part == null && create) { part = new Part(sizeHint, doc); ! map.put(doc, part); } lastPart = part; *************** *** 174,178 **** */ public boolean containsDoc(DocumentImpl doc) { ! return map.containsKey(doc.docId); } --- 176,180 ---- */ public boolean containsDoc(DocumentImpl doc) { ! return map.containsKey(doc); } *************** *** 237,241 **** int count = 0; Part part; ! for (Iterator i = map.valueIterator(); i.hasNext();) { part = (Part) i.next(); if (count + part.length > pos) --- 239,243 ---- int count = 0; Part part; ! for (Iterator i = map.values().iterator(); i.hasNext();) { part = (Part) i.next(); if (count + part.length > pos) *************** *** 288,292 **** part.remove(node); if (part.length == 0) ! map.remove(node.doc.getDocId()); setHasChanged(); } --- 290,294 ---- part.remove(node); if (part.length == 0) ! 
map.remove(node.doc); setHasChanged(); } *************** *** 311,315 **** Part part; size = 0; ! for (Iterator i = map.valueIterator(); i.hasNext();) { part = (Part) i.next(); part.sort(); --- 313,317 ---- Part part; size = 0; ! for (Iterator i = map.values().iterator(); i.hasNext();) { part = (Part) i.next(); part.sort(); *************** *** 328,332 **** Part part; size = 0; ! for (Iterator i = map.valueIterator(); i.hasNext();) { part = (Part) i.next(); part.sortInDocumentOrder(); --- 330,334 ---- Part part; size = 0; ! for (Iterator i = map.values().iterator(); i.hasNext();) { part = (Part) i.next(); part.sortInDocumentOrder(); *************** *** 346,350 **** public void setSelfAsContext() { Part part; ! for (Iterator i = map.valueIterator(); i.hasNext();) { part = (Part) i.next(); part.setSelfAsContext(); --- 348,352 ---- public void setSelfAsContext() { Part part; ! for (Iterator i = map.values().iterator(); i.hasNext();) { part = (Part) i.next(); part.setSelfAsContext(); *************** *** 401,405 **** sort(); Part part; ! for (Iterator i = map.valueIterator(); i.hasNext();) { part = (Part) i.next(); cachedDocuments.add(part.doc, false); --- 403,407 ---- sort(); Part part; ! for (Iterator i = map.values().iterator(); i.hasNext();) { part = (Part) i.next(); cachedDocuments.add(part.doc, false); *************** *** 658,663 **** NodeProxy next = null; ! ExtArrayIterator(Int2ObjectHashMap map) { ! docsIterator = map.valueIterator(); if (docsIterator.hasNext()) currentPart = (Part) docsIterator.next(); --- 660,665 ---- NodeProxy next = null; ! ExtArrayIterator(Map map) { ! docsIterator = map.values().iterator(); if (docsIterator.hasNext()) currentPart = (Part) docsIterator.next(); Index: NodeSet.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/NodeSet.java,v retrieving revision 1.26 retrieving revision 1.27 diff -C2 -d -r1.26 -r1.27 *** NodeSet.java 2 Jun 2004 12:12:53 -0000 1.26 --- NodeSet.java 3 Jun 2004 10:31:37 -0000 1.27 *************** *** 461,463 **** --- 461,465 ---- public int getState(); + + public String pprint(); } \ No newline at end of file |
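A quiet but relevant detail in the ExtArrayNodeSet change: replacing Int2ObjectHashMap with a TreeMap keyed by DocumentImpl (whose compareTo orders by docId) means map.values() now yields the per-document parts in ascending document order, which is what iteration over a node set in global document order relies on. In miniature, with plain Integer keys standing in for documents:

    import java.util.TreeMap;

    class DocOrderSketch {
        public static void main(String[] args) {
            // Keys compare by document id, so values() is visited in ascending doc order.
            TreeMap<Integer, String> parts = new TreeMap<>();
            parts.put(42, "part of doc 42");
            parts.put(7, "part of doc 7");
            parts.values().forEach(System.out::println);   // doc 7 first, then doc 42
        }
    }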
From: Wolfgang M. M. <wol...@us...> - 2004-06-03 10:28:21
Update of /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/text In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30142/src/org/exist/xquery/functions/text Modified Files: AbstractMatchFunction.java FuzzyMatchAll.java Log Message: Fixed text:fuzzy-match-all, text:fuzzy-match-any functions. Index: FuzzyMatchAll.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/text/FuzzyMatchAll.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** FuzzyMatchAll.java 17 May 2004 09:59:46 -0000 1.2 --- FuzzyMatchAll.java 3 Jun 2004 10:28:12 -0000 1.3 *************** *** 23,26 **** --- 23,28 ---- package org.exist.xquery.functions.text; + import java.util.List; + import org.exist.dom.NodeSet; import org.exist.dom.QName; *************** *** 61,66 **** public Sequence evalQuery(XQueryContext context, NodeSet nodes, ! String[] terms) throws XPathException { ! if (terms == null || terms.length == 0) return Sequence.EMPTY_SEQUENCE; // no search terms double threshold = 0.65; --- 63,68 ---- public Sequence evalQuery(XQueryContext context, NodeSet nodes, ! List terms) throws XPathException { ! if (terms == null || terms.size() == 0) return Sequence.EMPTY_SEQUENCE; // no search terms double threshold = 0.65; *************** *** 72,83 **** threshold = ((DoubleValue) thresOpt.convertTo(Type.DOUBLE)).getDouble(); } ! NodeSet hits[] = new NodeSet[terms.length]; ! String prefix; TermMatcher matcher; ! for (int k = 0; k < terms.length; k++) { ! if(terms[k].length() == 0) hits[k] = null; else { ! matcher = new FuzzyMatcher(terms[k], threshold); hits[k] = context.getBroker().getTextEngine().getNodes( --- 74,86 ---- threshold = ((DoubleValue) thresOpt.convertTo(Type.DOUBLE)).getDouble(); } ! NodeSet hits[] = new NodeSet[terms.size()]; ! String term; TermMatcher matcher; ! for (int k = 0; k < terms.size(); k++) { ! term = (String)terms.get(k); ! if(term.length() == 0) hits[k] = null; else { ! matcher = new FuzzyMatcher(term, threshold); hits[k] = context.getBroker().getTextEngine().getNodes( *************** *** 85,89 **** nodes.getDocumentSet(), nodes, ! matcher, terms[k].substring(0, 1)); } } --- 88,92 ---- nodes.getDocumentSet(), nodes, ! matcher, term.substring(0, 1)); } } Index: AbstractMatchFunction.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/text/AbstractMatchFunction.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** AbstractMatchFunction.java 11 May 2004 08:56:51 -0000 1.2 --- AbstractMatchFunction.java 3 Jun 2004 10:28:04 -0000 1.3 *************** *** 46,50 **** public abstract Sequence evalQuery(XQueryContext context, NodeSet nodes, ! String[] terms) throws XPathException; public NodeSet mergeResults(NodeSet[] hits) { --- 46,50 ---- public abstract Sequence evalQuery(XQueryContext context, NodeSet nodes, ! List terms) throws XPathException; public NodeSet mergeResults(NodeSet[] hits) { |
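For context on the threshold parameter: each search term becomes a matcher that accepts index entries whose similarity to the term reaches the threshold (0.65 unless overridden by the optional argument). FuzzyMatcher itself is not shown in this diff, so the following is only a plausible sketch assuming an edit-distance-based similarity; the real class may use a different metric:

    class FuzzyTermMatcherSketch {
        private final String term;
        private final double threshold;

        FuzzyTermMatcherSketch(String term, double threshold) {
            this.term = term;
            this.threshold = threshold;
        }

        boolean matches(String candidate) {
            int distance = levenshtein(term, candidate);
            int maxLen = Math.max(term.length(), candidate.length());
            if (maxLen == 0) {
                return true;                        // two empty strings are identical
            }
            double similarity = 1.0 - (double) distance / maxLen;
            return similarity >= threshold;         // 0.65 tolerates roughly a third of the characters differing
        }

        // Standard two-row Levenshtein edit distance.
        private static int levenshtein(String a, String b) {
            int[] prev = new int[b.length() + 1];
            int[] curr = new int[b.length() + 1];
            for (int j = 0; j <= b.length(); j++) prev[j] = j;
            for (int i = 1; i <= a.length(); i++) {
                curr[0] = i;
                for (int j = 1; j <= b.length(); j++) {
                    int cost = a.charAt(i - 1) == b.charAt(j - 1) ? 0 : 1;
                    curr[j] = Math.min(Math.min(curr[j - 1] + 1, prev[j] + 1), prev[j - 1] + cost);
                }
                int[] tmp = prev; prev = curr; curr = tmp;
            }
            return prev[b.length()];
        }
    }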
From: Wolfgang M. M. <wol...@us...> - 2004-06-03 10:28:21
Update of /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30142/src/org/exist/xquery/functions Modified Files: ExtRegexp.java Log Message: Fixed text:fuzzy-match-all, text:fuzzy-match-any functions. Index: ExtRegexp.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/ExtRegexp.java,v retrieving revision 1.5 retrieving revision 1.6 diff -C2 -d -r1.5 -r1.6 *** ExtRegexp.java 28 May 2004 10:54:09 -0000 1.5 --- ExtRegexp.java 3 Jun 2004 10:28:13 -0000 1.6 *************** *** 103,107 **** Expression path = getArgument(0); if ((getDependencies() & Dependency.CONTEXT_ITEM) == Dependency.NO_DEPENDENCY) { - LOG.debug("single execution"); NodeSet nodes = path == null --- 103,106 ---- |
From: Wolfgang M. M. <wol...@us...> - 2004-06-03 10:27:15
Update of /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv30022/src/org/exist/xquery/functions Modified Files: ModuleImpl.java Added Files: FunUnordered.java Log Message: Added fn:unordered function. --- NEW FILE: FunUnordered.java --- /* * eXist Open Source Native XML Database * Copyright (C) 2001-04 Wolfgang M. Meier * wol...@ex... * http://exist-db.org * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. * * $Id: FunUnordered.java,v 1.1 2004/06/03 10:27:05 wolfgang_m Exp $ */ package org.exist.xquery.functions; import org.exist.dom.QName; import org.exist.xquery.Cardinality; import org.exist.xquery.Function; import org.exist.xquery.FunctionSignature; import org.exist.xquery.XPathException; import org.exist.xquery.XQueryContext; import org.exist.xquery.value.Item; import org.exist.xquery.value.Sequence; import org.exist.xquery.value.SequenceType; import org.exist.xquery.value.Type; /** * The XQuery fn:unordered function. Currently, this function has no effect in eXist, * but it might be used for future optimizations. * * @author wolf */ public class FunUnordered extends Function { public final static FunctionSignature signature = new FunctionSignature( new QName("unordered", BUILTIN_FUNCTION_NS), "Takes a sequence as input and returns an arbitrary implementation dependent permutation " + "of the input sequence. 
Currently, this has no effect in eXist, but it might be used for future optimizations.", new SequenceType[] { new SequenceType(Type.ITEM, Cardinality.ZERO_OR_MORE) }, new SequenceType(Type.ITEM, Cardinality.ZERO_OR_MORE)); public FunUnordered(XQueryContext context) { super(context, signature); } /* (non-Javadoc) * @see org.exist.xquery.Expression#eval(org.exist.xquery.value.Sequence, org.exist.xquery.value.Item) */ public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException { return getArgument(0).eval(contextSequence, contextItem); } /* (non-Javadoc) * @see org.exist.xquery.Function#getCardinality() */ public int getCardinality() { return getArgument(0).getCardinality(); } /* (non-Javadoc) * @see org.exist.xquery.Function#returnsType() */ public int returnsType() { return getArgument(0).returnsType(); } } Index: ModuleImpl.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/xquery/functions/ModuleImpl.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** ModuleImpl.java 28 May 2004 10:54:09 -0000 1.2 --- ModuleImpl.java 3 Jun 2004 10:27:05 -0000 1.3 *************** *** 99,102 **** --- 99,103 ---- new FunctionDef(FunUpperCase.signature, FunUpperCase.class), new FunctionDef(FunZeroOrOne.signature, FunZeroOrOne.class), + new FunctionDef(FunUnordered.signature, FunUnordered.class), new FunctionDef(ExtCollection.signature, ExtCollection.class), new FunctionDef(ExtXCollection.signature, ExtXCollection.class), |
From: Giulio V. <gva...@us...> - 2004-06-03 07:30:40
Update of /cvsroot/exist/eXist-1.0/src/org/exist/storage In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv29285/src/org/exist/storage Modified Files: NativeBroker.java Log Message: Index: NativeBroker.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/storage/NativeBroker.java,v retrieving revision 1.77 retrieving revision 1.78 diff -C2 -d -r1.77 -r1.78 *** NativeBroker.java 2 Jun 2004 11:34:35 -0000 1.77 --- NativeBroker.java 3 Jun 2004 07:30:29 -0000 1.78 *************** *** 1168,1173 **** // check if this textual content should be fulltext-indexed // by calling IndexPaths.match(path) ! if (idx == null || idx.match(currentPath)) ! textEngine.storeText(idx, (TextImpl) node, false); break; } --- 1168,1174 ---- // check if this textual content should be fulltext-indexed // by calling IndexPaths.match(path) ! if (idx == null || idx.match(currentPath)){ ! boolean valore = (idx == null ? false : idx.preserveContent(currentPath)); ! textEngine.storeText(idx, (TextImpl) node, valore);} break; } |
From: Wolfgang M. M. <wol...@us...> - 2004-06-02 12:13:03
Update of /cvsroot/exist/eXist-1.0/src/org/exist/dom In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv10158/src/org/exist/dom Modified Files: AbstractNodeSet.java NodeSet.java Log Message: Fixed bug: ancestor::ncname returned only the first matching ancestor found. Index: AbstractNodeSet.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/AbstractNodeSet.java,v retrieving revision 1.16 retrieving revision 1.17 diff -C2 -d -r1.16 -r1.17 *** AbstractNodeSet.java 28 May 2004 10:54:15 -0000 1.16 --- AbstractNodeSet.java 2 Jun 2004 12:12:53 -0000 1.17 *************** *** 440,455 **** NodeProxy n, p, temp; NodeSet result = new ExtArrayNodeSet(); for (Iterator i = iterator(); i.hasNext();) { n = (NodeProxy) i.next(); ! p = al.parentWithChild(n.doc, n.gid, false, includeSelf, -1); ! if (p != null) { ! if ((temp = result.get(p)) == null) { ! if (rememberContext) ! p.addContextNode(n); ! else ! p.copyContext(n); ! result.add(p); ! } else if (rememberContext) ! temp.addContextNode(n); } } --- 440,459 ---- NodeProxy n, p, temp; NodeSet result = new ExtArrayNodeSet(); + NodeSet ancestors; for (Iterator i = iterator(); i.hasNext();) { n = (NodeProxy) i.next(); ! ancestors = al.ancestorsForChild(n.doc, n.gid, false, includeSelf, -1); ! for(Iterator j = ancestors.iterator(); j.hasNext(); ) { ! p = (NodeProxy) j.next(); ! if (p != null) { ! if ((temp = result.get(p)) == null) { ! if (rememberContext) ! p.addContextNode(n); ! else ! p.copyContext(n); ! result.add(p); ! } else if (rememberContext) ! temp.addContextNode(n); ! } } } *************** *** 682,685 **** --- 686,716 ---- /** + * Return all nodes contained in this node set that are ancestors of the node + * identified by doc and gid. + */ + public NodeSet ancestorsForChild( + DocumentImpl doc, + long gid, + boolean directParent, + boolean includeSelf, + int level) { + NodeSet result = new ArraySet(5); + NodeProxy temp; + if (includeSelf && (temp = get(doc, gid)) != null) + result.add(temp); + if (level < 0) + level = doc.getTreeLevel(gid); + while (gid > 0) { + gid = XMLUtil.getParentId(doc, gid, level); + if ((temp = get(doc, gid)) != null) + result.add(temp); + else if (directParent) + return result; + --level; + } + return result; + } + + /** * Return a new node set containing the parent nodes of all nodes in the * current set. Index: NodeSet.java =================================================================== RCS file: /cvsroot/exist/eXist-1.0/src/org/exist/dom/NodeSet.java,v retrieving revision 1.25 retrieving revision 1.26 diff -C2 -d -r1.25 -r1.26 *** NodeSet.java 25 Feb 2004 15:31:58 -0000 1.25 --- NodeSet.java 2 Jun 2004 12:12:53 -0000 1.26 *************** *** 365,369 **** boolean includeSelf, int level); ! /** * Return a new node set containing the parent nodes of all nodes in the --- 365,381 ---- boolean includeSelf, int level); ! ! /** ! * Return all nodes contained in this node set that are ancestors of the node ! * identified by doc and gid. ! * ! */ ! public NodeSet ancestorsForChild( ! DocumentImpl doc, ! long gid, ! boolean directParent, ! boolean includeSelf, ! int level); ! /** * Return a new node set containing the parent nodes of all nodes in the |
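The bug and the fix read most clearly when abstracted away from eXist's numeric node ids: the old selectAncestors stopped at the first ancestor found in the set, whereas ancestor::ncname must return every matching ancestor up to the root, which is what the new ancestorsForChild loop provides. A toy version of the corrected walk over a plain parent map (hypothetical types; the real code computes parents with XMLUtil.getParentId on gids):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    class AncestorWalkSketch {
        // parent.get(n) is the parent of node n, or null for the root.
        static List<String> ancestorsInSet(String node, Map<String, String> parent,
                                           Set<String> candidateSet, boolean includeSelf) {
            List<String> result = new ArrayList<>();
            String current = includeSelf ? node : parent.get(node);
            while (current != null) {
                if (candidateSet.contains(current)) {
                    result.add(current);    // keep walking: do NOT return on the first hit
                }
                current = parent.get(current);
            }
            return result;
        }
    }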