From: Bryan T. <tho...@us...> - 2007-04-10 18:34:22
Update of /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx In directory sc8-pr-cvs4.sourceforge.net:/tmp/cvs-serv24584/src/java/com/bigdata/objndx Modified Files: NodeSerializer.java AbstractBTree.java IndexSegmentBuilder.java Leaf.java IndexSegment.java Added Files: DataInputBuffer.java DataOutputBuffer.java Log Message: Fixed a bug in UUID assignment to index segments. Added some logic for fast data output for btree nodes and leaves. Index: IndexSegmentBuilder.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/IndexSegmentBuilder.java,v retrieving revision 1.30 retrieving revision 1.31 diff -C2 -d -r1.30 -r1.31 *** IndexSegmentBuilder.java 27 Mar 2007 17:11:41 -0000 1.30 --- IndexSegmentBuilder.java 10 Apr 2007 18:33:31 -0000 1.31 *************** *** 1470,1474 **** plan.nentries, maxNodeOrLeafLength, addrLeaves, addrNodes, addrRoot, addrExtensionMetadata, addrBloom, errorRate, out ! .length(), segmentUUID, indexUUID, now); md.write(out); --- 1470,1474 ---- plan.nentries, maxNodeOrLeafLength, addrLeaves, addrNodes, addrRoot, addrExtensionMetadata, addrBloom, errorRate, out ! .length(), indexUUID, segmentUUID, now); md.write(out); --- NEW FILE: DataOutputBuffer.java --- /** The Notice below must appear in each file of the Source Code of any copy you distribute of the Licensed Product. Contributors to any Modifications may add their own copyright notices to identify their own contributions. License: The contents of this file are subject to the CognitiveWeb Open Source License Version 1.1 (the License). You may not copy or use this file, in either source code or executable form, except in compliance with the License. You may obtain a copy of the License from http://www.CognitiveWeb.org/legal/license/ Software distributed under the License is distributed on an AS IS basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License. Copyrights: Portions created by or assigned to CognitiveWeb are Copyright (c) 2003-2003 CognitiveWeb. All Rights Reserved. Contact information for CognitiveWeb is available at http://www.CognitiveWeb.org Portions Copyright (c) 2002-2003 Bryan Thompson. Acknowledgements: Special thanks to the developers of the Jabber Open Source License 1.0 (JOSL), from which this License was derived. This License contains terms that differ from JOSL. Special thanks to the CognitiveWeb Open Source Contributors for their suggestions and support of the Cognitive Web. Modifications: */ /* * Created on Apr 7, 2007 */ package com.bigdata.objndx; import java.io.ByteArrayOutputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import com.bigdata.io.ByteBufferOutputStream; /** * Fast special purpose serialization onto a managed byte[] buffer. * * @todo IO costs might be reduced by special purpose routines using a counter * and a pre-/re-sized byte[] with direct byte[] access. This code would * be similar to the {@link KeyBuilder} in its handling of the byte[], but * would use standard Java data types rather than their unsigned variants * and would support packing of unsigned short, int, and long values. I * estimate 29% of the costs of the index load performance benchmark is * serialization with 13% of the total cost being the * {@link DataOutputStream} over a {@link ByteArrayOutputStream} or * {@link ByteBufferOutputStream}. 
The class could be a * {@link DataOutput} implementation that had intimate knowledge of a * backing byte[]. If we go this route, then * {@link DataOutput#writeChars(String)} and * {@link DataOutput#writeUTF(String)} might throw exceptions since they * are not used when (de-)serializing nodes and leaves. * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ public class DataOutputBuffer implements DataOutput { /** * The default capacity of the buffer. */ final public static int DEFAULT_INITIAL_CAPACITY = 1024; /** * A non-negative integer specifying the #of bytes of data in the buffer * that contain valid data starting from position zero(0). */ protected int len; /** * The buffer. This is re-allocated whenever the capacity of the buffer * is too small and reused otherwise. */ protected byte[] buf; /** * Throws exception unless the value is non-negative. * * @param msg * The exception message. * @param v * The value. * * @return The value. * * @exception IllegalArgumentException * unless the value is non-negative. */ protected static int assertNonNegative(String msg,int v) { if(v<0) throw new IllegalArgumentException(msg); return v; } /** * Uses an initial buffer capacity of <code>1024</code> bytes. */ public DataOutputBuffer() { this(DEFAULT_INITIAL_CAPACITY); } /** * @param initialCapacity * The initial capacity of the internal byte[]. */ public DataOutputBuffer(int initialCapacity) { this(0, new byte[assertNonNegative("initialCapacity", initialCapacity)]); } /** * @param len * The #of bytes of data in the provided buffer. * @param buf * The buffer, with <i>len</i> pre-existing bytes of valid data. * The buffer reference is used directly rather than making a * copy of the data. */ public DataOutputBuffer(int len, byte[] buf) { if (len < 0) throw new IllegalArgumentException("len"); if (buf == null) throw new IllegalArgumentException("buf"); if (len > buf.length) throw new IllegalArgumentException("len>buf.length"); this.len = len; this.buf = buf; } /** * The #of bytes of data in the key. */ final public int getLength() { return len; } /** * Ensure that at least <i>len</i> bytes are free in the buffer. The * {@link #buf buffer} may be grown by this operation but it will not be * truncated. * <p> * This operation is equivilent to * * <pre> * ensureCapacity(this.len + len) * </pre> * * and the latter is often used as an optimization. * * @param len * The minimum #of free bytes. */ final public void ensureFree(int len) { ensureCapacity(this.len + len ); } /** * Ensure that the buffer capacity is a least <i>capacity</i> total bytes. * The {@link #buf buffer} may be grown by this operation but it will not be * truncated. * * @param capacity * The minimum #of bytes in the buffer. */ final public void ensureCapacity(int capacity) { if(capacity<0) throw new IllegalArgumentException(); // assert capacity >= 0; final int overflow = capacity - buf.length; if(overflow>0) { /* * extend to at least the target capacity. */ final byte[] tmp = new byte[extend(capacity)]; // copy only the defined bytes. System.arraycopy(buf, 0, tmp, 0, this.len); buf = tmp; } } /** * Return the new capacity for the buffer (default is always large enough * and will normally double the buffer capacity each time it overflows). * * @param required * The minimum required capacity. * * @return The new capacity. */ protected int extend(int required) { return Math.max(required, buf.length * 2); } /** * Return a copy of the buffer. * * @return A new array containing data in the buffer. 
* * @see #wrap() */ final public byte[] toByteArray() { byte[] tmp = new byte[this.len]; System.arraycopy(buf, 0, tmp, 0, this.len); return tmp; } /** * Wraps up a reference to the data in a {@link ByteBuffer} * * @return A {@link ByteBuffer} encapsulating a reference to the data in the * current buffer. The data will be overwritten if {@link #reset()} * is invoked followed by any operations that write on the buffer. */ final public ByteBuffer wrap() { return ByteBuffer.wrap(buf, 0, len); } // /** // * Copy the data from the internal buffer into the supplied buffer. // * // * @param b // * A byte[]. // * // * @exception IndexOutOfBoundsException // * if the supplied buffer is not large enough. // */ // final public void copy(byte[] b) { // // System.arraycopy(this.buf, 0, b, 0, this.len); // // } /** * Prepares the buffer for new data by resetting the length to zero. * * @return This buffer. */ final public DataOutputBuffer reset() { len = 0; return this; } final public void write(int b) throws IOException { if (len + 1 > buf.length) ensureCapacity(len + 1); buf[len++] = (byte) (b & 0xff); } final public void write(byte[] b) throws IOException { write(b,0,b.length); } final public void write(byte[] b, int off, int len) throws IOException { ensureFree(len); System.arraycopy(b, off, buf, this.len, len); this.len += len; } final public void writeBoolean(boolean v) throws IOException { if (len + 1 > buf.length) ensureCapacity(len + 1); buf[len++] = v ? (byte)1 : (byte)0; } final public void writeByte(int v) throws IOException { if (len + 1 > buf.length) ensureCapacity(len + 1); buf[len++] = (byte) (v & 0xff); } final public void writeDouble(double d) throws IOException { if (len + 8 > buf.length) ensureCapacity(len + 8); long v = Double.doubleToLongBits(d); // big-endian. buf[len++] = (byte) (v >>> 56); buf[len++] = (byte) (v >>> 48); buf[len++] = (byte) (v >>> 40); buf[len++] = (byte) (v >>> 32); buf[len++] = (byte) (v >>> 24); buf[len++] = (byte) (v >>> 16); buf[len++] = (byte) (v >>> 8); buf[len++] = (byte) (v >>> 0); } final public void writeFloat(float f) throws IOException { if (len + 4 > buf.length) ensureCapacity(len + 4); int v = Float.floatToIntBits(f); buf[len++] = (byte) (v >>> 24); buf[len++] = (byte) (v >>> 16); buf[len++] = (byte) (v >>> 8); buf[len++] = (byte) (v >>> 0); } final public void writeInt(int v) throws IOException { if (len + 4 > buf.length) ensureCapacity(len + 4); buf[len++] = (byte) (v >>> 24); buf[len++] = (byte) (v >>> 16); buf[len++] = (byte) (v >>> 8); buf[len++] = (byte) (v >>> 0); } final public void writeLong(long v) throws IOException { if (len + 8 > buf.length) ensureCapacity(len + 8); // big-endian. 
buf[len++] = (byte) (v >>> 56); buf[len++] = (byte) (v >>> 48); buf[len++] = (byte) (v >>> 40); buf[len++] = (byte) (v >>> 32); buf[len++] = (byte) (v >>> 24); buf[len++] = (byte) (v >>> 16); buf[len++] = (byte) (v >>> 8); buf[len++] = (byte) (v >>> 0); } final public void writeShort(int v) throws IOException { if (len + 2 > buf.length) ensureCapacity(len + 2); // big-endian buf[len++] = (byte) (v >>> 8); buf[len++] = (byte) (v >>> 0); } final public void writeChar(int v) throws IOException { if (len + 2 > buf.length) ensureCapacity(len + 2); buf[len++] = (byte) (v >>> 8); buf[len++] = (byte) (v >>> 0); } public void writeBytes(String s) throws IOException { int len = s.length(); for (int i = 0 ; i < len ; i++) { write((byte)s.charAt(i)); } } public void writeChars(String s) throws IOException { int len = s.length(); for (int i = 0 ; i < len ; i++) { int v = s.charAt(i); write((v >>> 8) & 0xFF); write((v >>> 0) & 0xFF); } } /** * Note: This is not wildly efficient (it would be fine if * DataOutputStream#writeUTF(String str, DataOutput out)} was public) but * the use cases for serializing the nodes and leaves of a btree do not * suggest any requirement for Unicode (if you assume that the application * values are already being serialized as byte[]s - which is always true * when there is a client-server divide). */ public void writeUTF(String str) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); dos.writeUTF(str); dos.flush(); write(baos.toByteArray()); } /* * Pack unsigned long integer. */ /** * Packs a non-negative long value into the minimum #of bytes in which the * value can be represented and writes those bytes onto the output stream. * The first byte determines whether or not the long value was packed and, * if packed, how many bytes were required to represent the packed long * value. When the high bit of the first byte is a one (1), then the long * value could not be packed and the long value is found by clearing the * high bit and interpreting the first byte plus the next seven (7) bytes as * a long. Otherwise the next three (3) bits are interpreted as an unsigned * integer giving the #of bytes (nbytes) required to represent the packed * long value. To recover the long value the high nibble is cleared and the * first byte together with the next nbytes are interpeted as an unsigned * long value whose leading zero bytes were not written. * * <pre> * * [0|1|2|3|4|5|6|7] * 1 - - - nbytes = 8, clear high bit and interpret this plus the next 7 bytes as a long. * 0 1 1 1 nbytes = 7, clear high nibble and interpret this plus the next 6 bytes as a long. * 0 1 1 0 nbytes = 6, clear high nibble and interpret this plus the next 5 bytes as a long. * 0 1 0 1 nbytes = 5, clear high nibble and interpret this plus the next 4 bytes as a long. * 0 1 0 0 nbytes = 4, clear high nibble and interpret this plus the next 3 bytes as a long. * 0 0 1 1 nbytes = 3, clear high nibble and interpret this plus the next 3 bytes as a long. * 0 0 1 0 nbytes = 2, clear high nibble and interpret this plus the next byte as a long. * 0 0 0 1 nbytes = 1, clear high nibble. value is the low nibble. * * </pre> * * @param v The unsigned long value. * * @return The #of bytes onto which the unsigned long value was packed. */ final public int packLong( long v ) throws IOException { /* * You can only pack non-negative long values with this method. 
*/ if (v < 0) { throw new IllegalArgumentException("negative value: v=" + v); } /* * If the high byte is non-zero then we will write the value as a normal * long and return nbytes == 8. This case handles large positive long * values. */ if( ( v >> 56 ) != 0 ) { pbuf[0] = ( (byte)((0xff & (v >> 56))|0x80) ); // note: set the high bit. pbuf[1] = ( (byte)(0xff & (v >> 48)) ); pbuf[2] = ( (byte)(0xff & (v >> 40)) ); pbuf[3] = ( (byte)(0xff & (v >> 32)) ); pbuf[4] = ( (byte)(0xff & (v >> 24)) ); pbuf[5] = ( (byte)(0xff & (v >> 16)) ); pbuf[6] = ( (byte)(0xff & (v >> 8)) ); pbuf[7] = ( (byte)(0xff & v) ); write(pbuf, 0, 8); return 8; } // #of nibbles required to represent the long value. final int nnibbles = getNibbleLength( v ); /* * Is [nnibbles] even? (If it is even then we need to pad out an extra * zero nibble in the first byte.) */ final boolean evenNibbleCount = ( nnibbles == ( ( nnibbles >> 1 ) << 1 ) ); // #of bytes required to represent the long value (plus the header nibble). final int nbytes = ( ( nnibbles +1 ) >> 1 ) + (evenNibbleCount?1:0); int nwritten = 0; if( evenNibbleCount ) { /* * An even nibble count requires that we pad the low nibble of the * first byte with zeros. */ // header byte. low nibble is empty. byte b = (byte) ( nbytes << 4 ); pbuf[nwritten++] = b; // remaining bytes containing the packed value. for( int i=(nnibbles-2)<<2; i>=0; i-=8 ) { b = (byte) (0xff & (v >> i)); pbuf[nwritten++] = b; } } else { /* * An odd nibble count means that we pack the first nibble of the * long value into the low nibble of the header byte. In this case * the first nibble will always be the low nibble of the first * non-zero byte in the long value (the high nibble of that byte * must be zero since there is an odd nibble count). */ byte highByte = (byte) (0xff & (v >> ((nbytes-1)*8) )); byte b = (byte) ( ( nbytes << 4 ) | highByte ); pbuf[nwritten++] = b; for( int i=(nnibbles-3)<<2; i>=0; i-=8 ) { b = (byte) (0xff & (v >> i)); pbuf[nwritten++] = b; } } write(pbuf,0,nwritten); return nwritten; } /** * Private buffer for packing long integers. */ private byte[] pbuf = new byte[8]; /** * Return the #of non-zero nibbles, counting from the first non-zero nibble * in the long value. A value of <code>0L</code> is considered to be one * nibble for our purposes. * * @param v * The long value. * * @return The #of nibbles in [1:16]. */ static protected final int getNibbleLength( long v ) { for( int i=56, j=16; i>=0; i-=8, j-=2 ) { if( (0xf0 & (v >> i)) != 0 ) return j; if( (0x0f & (v >> i)) != 0 ) return j-1; } if (v != 0) throw new AssertionError("v=" + v); /* * value is zero, which is considered to be one nibble for our purposes. */ return 1; } /* * Pack unsigned short integer. */ /** * Packs a non-negative short value into one or two bytes and writes them on * <i>os </i>. A short in [0:127] is packed into one byte. Larger values are * packed into two bytes. The high bit of the first byte is set if the value * was packed into two bytes. If the bit is set, clear the high bit, read * the next byte, and interpret the two bytes as a short value. Otherwise * interpret the byte as a short value. * * @param v The unsigned short integer. * * @return The #of bytes into which the value was packed. */ final public int packShort( short v ) throws IOException { /* * You can only pack non-negative values with this method. */ if( v < 0 ) { throw new IllegalArgumentException( "negative value: v="+v ); } if( v > 127 ) { // the value requires two bytes. 
buf[len++] = ( (byte)((0xff & (v >> 8))|0x80) ); // note: set the high bit. buf[len++] = ( (byte)(0xff & v) ); return 2; } else { // the value fits in one byte. buf[len++] = ( (byte)(0xff & v) ); return 1; } } } Index: IndexSegment.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/IndexSegment.java,v retrieving revision 1.19 retrieving revision 1.20 diff -C2 -d -r1.19 -r1.20 *** IndexSegment.java 27 Mar 2007 14:34:22 -0000 1.19 --- IndexSegment.java 10 Apr 2007 18:33:31 -0000 1.20 *************** *** 100,107 **** BTree.DEFAULT_HARD_REF_QUEUE_SCAN)); - // report on the event. - ResourceManager.openIndexSegment(null/* name */, fileStore.getFile() - .toString(), fileStore.size()); - } --- 100,103 ---- *************** *** 142,145 **** --- 138,145 ---- _open(); + // report on the event. + ResourceManager.openIndexSegment(null/* name */, fileStore.getFile() + .toString(), fileStore.size()); + } *************** *** 311,314 **** --- 311,315 ---- } else { + _key = unbox(key); Index: Leaf.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/Leaf.java,v retrieving revision 1.23 retrieving revision 1.24 diff -C2 -d -r1.23 -r1.24 *** Leaf.java 13 Feb 2007 23:01:02 -0000 1.23 --- Leaf.java 10 Apr 2007 18:33:31 -0000 1.24 *************** *** 48,51 **** --- 48,52 ---- import java.io.PrintStream; + import java.lang.ref.WeakReference; import java.util.Arrays; import java.util.Iterator; *************** *** 775,779 **** btree.touch(this); ! final int entryIndex = keys.search(searchKey); --- 776,780 ---- btree.touch(this); ! final int entryIndex = keys.search(searchKey); *************** *** 795,799 **** * Looks up one or more tuples and reports whether or not they exist. * ! - * @return The #of tuples processed. * * @todo optimize batch lookup here (also when bloom filter is available on --- 796,800 ---- * Looks up one or more tuples and reports whether or not they exist. * ! * @return The #of tuples processed. * * @todo optimize batch lookup here (also when bloom filter is available on Index: AbstractBTree.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/AbstractBTree.java,v retrieving revision 1.22 retrieving revision 1.23 diff -C2 -d -r1.22 -r1.23 *** AbstractBTree.java 4 Apr 2007 16:52:16 -0000 1.22 --- AbstractBTree.java 10 Apr 2007 18:33:31 -0000 1.23 *************** *** 49,52 **** --- 49,53 ---- import java.io.PrintStream; + import java.lang.ref.WeakReference; import java.nio.ByteBuffer; import java.util.Iterator; *************** *** 132,141 **** * flag turns on some more expensive assertions. */ ! final protected boolean debug = DEBUG||true; /** * Counters tracking various aspects of the btree. */ ! /*protected*/ public final Counters counters = new Counters(this); /** --- 133,142 ---- * flag turns on some more expensive assertions. */ ! final protected boolean debug = DEBUG || true; /** * Counters tracking various aspects of the btree. */ ! /* protected */public final Counters counters = new Counters(this); /** *************** *** 153,157 **** */ final protected UUID indexUUID; ! /** * The branching factor for the btree. --- 154,158 ---- */ final protected UUID indexUUID; ! /** * The branching factor for the btree. *************** *** 175,179 **** */ protected AbstractNode root; ! /** * Used to serialize and de-serialize the nodes and leaves of the tree. 
--- 176,205 ---- */ protected AbstractNode root; ! ! // /** ! // * The finger is a trial feature. The purpose is to remember the last ! // * leaf(s) in the tree that was visited by a search operation and to ! // * pre-test that those leaf(s) on the next search operation. ! // * <p> ! // * Fingers can do wonders by reducing search in common cases where the same ! // * leaf is visited by a sequence of operations, e.g., sequential insert or ! // * conditional insert realized by lookup + insert. Multiple fingers may be ! // * required if the tree is used by concurrent readers (and the finger might ! // * be part of the reader state) while a single finger would do for a tree ! // * being used by a writer (since concurrent modification is not supported). ! // * <p> ! // * The finger is set each time a leaf is found as the result of a search. It ! // * is tested by each search operation before walking the tree. The finger is ! // * a {@link WeakReference} so that we do not need to worry about having a ! // * hard reference to an arbitrary part of the tree. ! // * ! // * @todo Does the finger need to encapsulate the hard references for the ! // * parents in order to ensure that the parents remain strongly ! // * reachable? (Parents are only weakly reachable from their children.) ! // */ ! // private WeakReference<Leaf> finger; ! // ! // final private boolean useFinger = false; ! /** * Used to serialize and de-serialize the nodes and leaves of the tree. *************** *** 193,197 **** */ public int referenceCount = 0; ! /** * Leaves (and nodes) are added to a hard reference queue when they are --- 219,223 ---- */ public int referenceCount = 0; ! /** * Leaves (and nodes) are added to a hard reference queue when they are *************** *** 343,348 **** assert nodeFactory != null; ! if(indexUUID == null) throw new IllegalArgumentException("indexUUID"); ! this.store = store; --- 369,375 ---- assert nodeFactory != null; ! if (indexUUID == null) ! throw new IllegalArgumentException("indexUUID"); ! this.store = store; *************** *** 356,360 **** this.indexUUID = indexUUID; ! } --- 383,387 ---- this.indexUUID = indexUUID; ! } *************** *** 366,370 **** * store). The index reference remains valid after a {@link #close()}. A * closed index is transparently restored by either {@link #getRoot()} or ! * {@link #reopen()}. * <p> * This implementation clears the hard reference queue (releasing all node --- 393,397 ---- * store). The index reference remains valid after a {@link #close()}. A * closed index is transparently restored by either {@link #getRoot()} or ! * {@link #reopen()}. * <p> * This implementation clears the hard reference queue (releasing all node *************** *** 389,409 **** public void close() { ! if(root==null) { throw new IllegalStateException("Already closed"); ! } if (root.dirty) { ! throw new IllegalStateException("Root node is dirty"); ! } ! /* * Release buffers. */ nodeSer.close(); ! /* * Clear the hard reference queue (this will not trigger any writes --- 416,436 ---- public void close() { ! if (root == null) { throw new IllegalStateException("Already closed"); ! } if (root.dirty) { ! throw new IllegalStateException("Root node is dirty"); ! } ! /* * Release buffers. */ nodeSer.close(); ! /* * Clear the hard reference queue (this will not trigger any writes *************** *** 411,422 **** */ leafQueue.evictAll(true); ! /* * Clear the reference to the root node (permits GC). */ root = null; ! } ! 
/** * This is part of a {@link #close()}/{@link #reopen()} protocol that may --- 438,449 ---- */ leafQueue.evictAll(true); ! /* * Clear the reference to the root node (permits GC). */ root = null; ! } ! /** * This is part of a {@link #close()}/{@link #reopen()} protocol that may *************** *** 432,436 **** */ abstract protected void reopen(); ! /** * An "open" index has its buffers and root node in place rather than having --- 459,463 ---- */ abstract protected void reopen(); ! /** * An "open" index has its buffers and root node in place rather than having *************** *** 444,452 **** */ final public boolean isOpen() { ! return root != null; ! } ! /** * The backing store. --- 471,479 ---- */ final public boolean isOpen() { ! return root != null; ! } ! /** * The backing store. *************** *** 515,523 **** */ final public UUID getIndexUUID() { ! return indexUUID; ! } ! /** * The root of the btree. This is initially a leaf until the leaf is split, --- 542,550 ---- */ final public UUID getIndexUUID() { ! return indexUUID; ! } ! /** * The root of the btree. This is initially a leaf until the leaf is split, *************** *** 532,537 **** // make sure that the root is defined. ! if(root == null) reopen(); ! return root; --- 559,565 ---- // make sure that the root is defined. ! if (root == null) ! reopen(); ! return root; *************** *** 550,572 **** */ public void addAll(AbstractBTree src) { ! ! if(src==null) throw new IllegalArgumentException(); ! ! if(src==this) throw new IllegalArgumentException(); ! IEntryIterator itr = src.entryIterator(); ! ! while(itr.hasNext()) { ! Object val = itr.next(); ! byte[] key = itr.getKey(); ! ! insert(key,val); ! } ! } ! public void insert(BatchInsert op) { --- 578,602 ---- */ public void addAll(AbstractBTree src) { ! ! if (src == null) ! throw new IllegalArgumentException(); ! ! if (src == this) ! throw new IllegalArgumentException(); ! IEntryIterator itr = src.entryIterator(); ! ! while (itr.hasNext()) { ! Object val = itr.next(); ! byte[] key = itr.getKey(); ! ! insert(key, val); ! } ! } ! public void insert(BatchInsert op) { *************** *** 639,646 **** public void contains(BatchContains op) { ! final int ntuples = op.ntuples; ! ! while( op.tupleIndex < ntuples ) { // skip tuples already marked as true. --- 669,676 ---- public void contains(BatchContains op) { ! final int ntuples = op.ntuples; ! ! while (op.tupleIndex < ntuples) { // skip tuples already marked as true. *************** *** 669,676 **** public void remove(BatchRemove op) { ! final int ntuples = op.ntuples; ! ! while( op.tupleIndex < ntuples) { /* --- 699,706 ---- public void remove(BatchRemove op) { ! final int ntuples = op.ntuples; ! ! while (op.tupleIndex < ntuples) { /* *************** *** 710,716 **** */ final protected byte[] unbox(Object key) { ! return keyBuilder.reset().append(((Integer) key).intValue()).getKey(); ! } --- 740,820 ---- */ final protected byte[] unbox(Object key) { ! return keyBuilder.reset().append(((Integer) key).intValue()).getKey(); ! ! } ! ! /** ! * Returns the node or leaf to be used for search. This implementation is ! * aware of the {@link #finger} and will return it if the key lies within ! * the finger. ! * ! * @param key ! * The key. ! * ! * @return Either the root node of the tree or a recently touched leaf that ! * is known to span the key. ! */ ! protected AbstractNode getRootOrFinger(byte[] key) { ! ! return getRoot(); ! // ! // if (finger == null) ! // return getRoot(); ! // ! // Leaf leaf = finger.get(); ! // ! 
// if (leaf == null || leaf.deleted) { ! // ! // // @todo clear finger on delete/copyOnWrite ! // ! // return getRoot(); ! // ! // } ! // ! // int entryIndex = leaf.keys.search(key); ! // ! // if (entryIndex >= 0) { ! // ! // /* ! // * There is an exact match on the finger so we know that this key ! // * belongs in this leaf. ! // */ ! // ! // return leaf; ! // ! // } ! // ! // if (entryIndex < 0) { ! // ! // // Convert the position to obtain the insertion point. ! // entryIndex = -entryIndex - 1; ! // ! // if (entryIndex > 0 && entryIndex < leaf.nkeys - 1) { ! // ! // /* ! // * The key belongs somewhere in between the first and the last ! // * key in the leaf. For all those positions, we are again ! // * guarenteed that this key belongs within this leaf. ! // */ ! // ! // return leaf; ! // ! // } ! // ! // } ! // ! // /* ! // * The key might belong in the leaf, but we are not sure since it lies ! // * on either the low or the high edge of the leaf. ! // * ! // * @todo We could disambiguate this using the actual value of the ! // * separator keys on the parent. If the key is greater than or equal to ! // * the separatorKey for this leaf and strictly less than the ! // * separatorKey for the rightSibling, then it belongs in this leaf. ! // */ ! // ! // return getRoot(); ! } *************** *** 722,735 **** counters.ninserts++; ! if (key instanceof byte[]) { ! ! return getRoot().insert((byte[]) key,value); ! ! } else { ! ! return getRoot().insert( unbox(key), value ); ! } ! } --- 826,837 ---- counters.ninserts++; ! if (!(key instanceof byte[])) { ! ! key = unbox(key); ! } ! ! return getRootOrFinger((byte[]) key).insert((byte[]) key, value); ! } *************** *** 741,754 **** counters.nfinds++; ! if (key instanceof byte[]) { - return getRoot().lookup((byte[])key); - - } else { - - return getRoot().lookup(unbox(key)); - } } --- 843,854 ---- counters.nfinds++; ! if (!(key instanceof byte[])) { ! ! key = unbox(key); } + return getRootOrFinger((byte[]) key).lookup((byte[]) key); + } *************** *** 759,764 **** counters.nfinds++; ! ! return getRoot().contains((byte[])key); } --- 859,864 ---- counters.nfinds++; ! ! return getRootOrFinger((byte[]) key).contains((byte[]) key); } *************** *** 771,784 **** counters.nremoves++; ! if (key instanceof byte[]) { ! ! return getRoot().remove((byte[])key); ! ! } else { ! ! return getRoot().remove(unbox(key)); ! } } --- 871,882 ---- counters.nremoves++; ! if (!(key instanceof byte[])) { ! ! key = unbox(key); ! } + return getRootOrFinger((byte[]) key).remove((byte[]) key); + } *************** *** 790,794 **** counters.nindexOf++; ! int index = getRoot().indexOf(key); return index; --- 888,892 ---- counters.nindexOf++; ! int index = getRootOrFinger((byte[]) key).indexOf(key); return index; *************** *** 839,843 **** AbstractNode root = getRoot(); ! int fromIndex = (fromKey == null ? 0 : root.indexOf(fromKey)); --- 937,941 ---- AbstractNode root = getRoot(); ! int fromIndex = (fromKey == null ? 0 : root.indexOf(fromKey)); *************** *** 983,990 **** if (root != null) { ! return root.dump(level, out, 0, true); ! ! } else return true; } --- 1081,1089 ---- if (root != null) { ! return root.dump(level, out, 0, true); ! ! } else ! return true; } *************** *** 1059,1062 **** --- 1158,1162 ---- * Also see {@link DefaultEvictionListener}. 
*/ + if (node.referenceCount == 1) { *************** *** 1067,1070 **** --- 1167,1180 ---- } + // if (useFinger && node instanceof ILeafData) { + // + // if (finger == null || finger.get() != node) { + // + // finger = new WeakReference<Leaf>((Leaf) node); + // + // } + // + // } + } *************** *** 1267,1276 **** protected AbstractNode readNodeOrLeaf(long addr) { ! // /* ! // * offer the node serializer's buffer to the IRawStore. it will be used ! // * iff it is large enough and the store does not prefer to return a ! // * read-only slice. ! // */ ! // ByteBuffer tmp = store.read(addr, nodeSer._buf); ByteBuffer tmp = store.read(addr); assert tmp.position() == 0; --- 1377,1387 ---- protected AbstractNode readNodeOrLeaf(long addr) { ! // /* ! // * offer the node serializer's buffer to the IRawStore. it will be ! // used ! // * iff it is large enough and the store does not prefer to return a ! // * read-only slice. ! // */ ! // ByteBuffer tmp = store.read(addr, nodeSer._buf); ByteBuffer tmp = store.read(addr); assert tmp.position() == 0; --- NEW FILE: DataInputBuffer.java --- /** The Notice below must appear in each file of the Source Code of any copy you distribute of the Licensed Product. Contributors to any Modifications may add their own copyright notices to identify their own contributions. License: The contents of this file are subject to the CognitiveWeb Open Source License Version 1.1 (the License). You may not copy or use this file, in either source code or executable form, except in compliance with the License. You may obtain a copy of the License from http://www.CognitiveWeb.org/legal/license/ Software distributed under the License is distributed on an AS IS basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License. Copyrights: Portions created by or assigned to CognitiveWeb are Copyright (c) 2003-2003 CognitiveWeb. All Rights Reserved. Contact information for CognitiveWeb is available at http://www.CognitiveWeb.org Portions Copyright (c) 2002-2003 Bryan Thompson. Acknowledgements: Special thanks to the developers of the Jabber Open Source License 1.0 (JOSL), from which this License was derived. This License contains terms that differ from JOSL. Special thanks to the CognitiveWeb Open Source Contributors for their suggestions and support of the Cognitive Web. Modifications: */ /* * Created on Apr 9, 2007 */ package com.bigdata.objndx; import java.io.DataInput; import java.io.DataInputStream; import java.io.EOFException; import java.io.IOException; /** * A fast implementation of DataInput designed to read from a byte[]. * * @see DataOutputBuffer * * @see DataInputStream * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ public class DataInputBuffer implements DataInput { /** * The buffer whose contents are being read. */ final protected byte[] buf; /** * The current offset in the buffer. This is incremented each time * any data is read from the buffer. */ protected int off; /** * The index of the last byte in the buffer having valid data. */ final protected int len; public DataInputBuffer(byte[] buf) { if(buf==null) throw new IllegalArgumentException(); this.buf = buf; this.off = 0; this.len = buf.length; } /* * DataInput */ public boolean readBoolean() throws IOException { if(off>=len) throw new EOFException(); return buf[off++] == 1 ? 
true : false; } public byte readByte() throws IOException { if (off >= len) throw new EOFException(); return buf[off++]; } public char readChar() throws IOException { if (off + 2 > len) throw new EOFException(); int ch1 = buf[off++]; int ch2 = buf[off++]; return (char) ((ch1 << 8) + (ch2 << 0)); } public double readDouble() throws IOException { return Double.longBitsToDouble(readLong()); } public float readFloat() throws IOException { return Float.intBitsToFloat(readInt()); } final public void readFully(byte[] b) throws IOException { readFully(b, 0, b.length); } final public void readFully(byte[] b, final int off, final int len) throws IOException { if (this.off + len > this.len) throw new EOFException(); System.arraycopy(buf, this.off, b, off, len); this.off += len; } public int readInt() throws IOException { if(off+4>len) throw new EOFException(); int v = 0; // big-endian. v += (0xffL & buf[off++]) << 24; v += (0xffL & buf[off++]) << 16; v += (0xffL & buf[off++]) << 8; v += (0xffL & buf[off++]) << 0; return v; } public String readLine() throws IOException { throw new UnsupportedOperationException(); } public long readLong() throws IOException { if(off+8>len) throw new EOFException(); long v = 0L; // big-endian. v += (0xffL & buf[off++]) << 56; v += (0xffL & buf[off++]) << 48; v += (0xffL & buf[off++]) << 40; v += (0xffL & buf[off++]) << 32; v += (0xffL & buf[off++]) << 24; v += (0xffL & buf[off++]) << 16; v += (0xffL & buf[off++]) << 8; v += (0xffL & buf[off++]) << 0; return v; } public short readShort() throws IOException { if (off + 2 > len) throw new EOFException(); int ch1 = buf[off++]; int ch2 = buf[off++]; return (short) ((ch1 << 8) + (ch2 << 0)); } public String readUTF() throws IOException { return DataInputStream.readUTF(this); } public int readUnsignedByte() throws IOException { if (off >= len) throw new EOFException(); return buf[off++]; } public int readUnsignedShort() throws IOException { if (off + 2 > len) throw new EOFException(); int ch1 = buf[off++]; int ch2 = buf[off++]; return ((ch1 << 8) + (ch2 << 0)); } public int skipBytes(int n) throws IOException { off += n; if(off>len) throw new IOException(); return n; } /* * unpack unsigned long integer. */ /** * Unpack a long value from the current buffer position. * * @return The long value. * * @throws IOException */ final public long unpackLong() throws IOException { int b = buf[off++]; int nbytes; long l; if ((b & 0x80) != 0) { // high bit is set. nbytes = 8; // use 8 bytes (this one plus the next 7). l = b & 0x7f; // clear the high bit - the rest of the byte is the // start value. } else { // high bit is clear. nbytes = b >> 4; // nbytes is the upper nibble. (right shift one // nibble). l = b & 0x0f; // starting value is lower nibble (clear the upper // nibble). } for (int i = 1; i < nbytes; i++) { // Read the next byte. b = buf[off++]; // Shift the existing value one byte left and add into the low // (unsigned) byte. l = (l << 8) + (0xff & b); } return l; } /* * unpack unsigned short integer. */ /** * Unpack a non-negative short value from the input stream. * * @param is * The input stream. * * @return The short value. * * @throws IOException */ final public short unpackShort() throws IOException { short b = (short) buf[off++]; short v; if ((b & 0x80) != 0) { // high bit is set. /* * clear the high bit and shift over one byte. */ v = (short) ((b & 0x7f) << 8); b = buf[off++]; // read the next byte. v |= (b & 0xff); // and combine it together with the high byte. } else { // high bit is clear. 
            v = b; // interpret the byte as a short value.
        }
        return (short) v;
    }
}

Index: NodeSerializer.java
===================================================================
RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/NodeSerializer.java,v
retrieving revision 1.34
retrieving revision 1.35
diff -C2 -d -r1.34 -r1.35
*** NodeSerializer.java 15 Mar 2007 16:11:08 -0000 1.34
--- NodeSerializer.java 10 Apr 2007 18:33:31 -0000 1.35
***************
*** 47,51 ****
--- 47,53 ----
  package com.bigdata.objndx;
  
+ import java.io.ByteArrayOutputStream;
  import java.io.DataInputStream;
+ import java.io.DataOutput;
  import java.io.DataOutputStream;
  import java.io.EOFException;
***************
*** 97,100 ****
--- 99,103 ----
   * interfaces is encouraged.
   * </p>
+  *
   * @author <a href="mailto:tho...@us...">Bryan Thompson</a>
   * @version $Id$
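
For a quick sanity check of the two new classes, here is a minimal round-trip
sketch (illustrative only, not part of the committed sources): it writes a few
values with DataOutputBuffer and reads them back with DataInputBuffer. It
assumes only the public signatures shown in the listings above; the class name
BufferRoundTrip and the sample values are arbitrary.

import com.bigdata.objndx.DataInputBuffer;
import com.bigdata.objndx.DataOutputBuffer;

public class BufferRoundTrip {

    public static void main(String[] args) throws Exception {

        // Serialize a few values onto the managed byte[] (big-endian).
        DataOutputBuffer out = new DataOutputBuffer();

        out.writeInt(42);           // always 4 bytes.
        out.writeLong(1234567890L); // always 8 bytes.
        out.packLong(7L);           // fits in the low nibble: packs into 1 byte.
        out.packShort((short) 200); // > 127: packs into 2 bytes, high bit set.

        // Trimmed copy of the valid bytes written so far.
        byte[] data = out.toByteArray();

        // Read the values back in the same order they were written.
        DataInputBuffer in = new DataInputBuffer(data);

        System.out.println(in.readInt());     // 42
        System.out.println(in.readLong());    // 1234567890
        System.out.println(in.unpackLong());  // 7
        System.out.println(in.unpackShort()); // 200
    }
}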
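
The growth policy of the managed byte[] (ensureCapacity() grows to at least the
required capacity and extend() normally doubles it) can be observed by
overriding the protected extend() hook. A minimal sketch, assuming only the
protected buf field and extend() method shown above; the subclass and its name
are hypothetical:

import com.bigdata.objndx.DataOutputBuffer;

public class GrowthPolicyDemo {

    /**
     * Hypothetical subclass that logs each time the backing byte[] must be
     * extended. extend(required) returns max(required, 2 * buf.length), so
     * the capacity at least doubles whenever the buffer overflows.
     */
    static class LoggingOutputBuffer extends DataOutputBuffer {

        LoggingOutputBuffer(int initialCapacity) {
            super(initialCapacity);
        }

        protected int extend(int required) {
            final int newCapacity = super.extend(required);
            // buf has not been re-allocated yet, so buf.length is the old capacity.
            System.out.println("extend: " + buf.length + " -> " + newCapacity);
            return newCapacity;
        }
    }

    public static void main(String[] args) throws Exception {

        DataOutputBuffer out = new LoggingOutputBuffer(8);

        out.writeLong(0L); // exactly fills the initial 8 byte capacity.
        out.writeInt(0);   // overflows: extend(12) grows the capacity to max(12, 16) = 16.
    }
}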