From: Bryan T. <tho...@us...> - 2007-04-12 23:59:53
Update of /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx In directory sc8-pr-cvs4.sourceforge.net:/tmp/cvs-serv2548/src/java/com/bigdata/objndx Modified Files: PackedAddressSerializer.java NodeSerializer.java DataOutputBuffer.java AddressSerializer.java ByteArrayValueSerializer.java IValueSerializer.java KeyBufferSerializer.java IAddressSerializer.java IndexSegment.java IKeySerializer.java Log Message: Added a Sesame 1.x SAIL implementation. This is NOT intended for production use. It is just being done to gain a high-level query language integration for the triple store. Index: IAddressSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/IAddressSerializer.java,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** IAddressSerializer.java 26 Dec 2006 16:16:14 -0000 1.1 --- IAddressSerializer.java 12 Apr 2007 23:59:35 -0000 1.2 *************** *** 48,53 **** package com.bigdata.objndx; ! import java.io.DataInputStream; ! import java.io.DataOutputStream; import java.io.IOException; --- 48,53 ---- package com.bigdata.objndx; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.IOException; *************** *** 85,89 **** * written on the array. */ ! public void getChildAddresses(DataInputStream is, long[] childAddr, int nchildren) throws IOException; --- 85,89 ---- * written on the array. */ ! public void getChildAddresses(DataInput is, long[] childAddr, int nchildren) throws IOException; *************** *** 99,103 **** * [0:n-1] are defined and must be written. */ ! public void putChildAddresses(DataOutputStream os, long[] childAddr, int nchildren) throws IOException; --- 99,103 ---- * [0:n-1] are defined and must be written. */ ! public void putChildAddresses(DataOutputBuffer os, long[] childAddr, int nchildren) throws IOException; Index: IValueSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/IValueSerializer.java,v retrieving revision 1.6 retrieving revision 1.7 diff -C2 -d -r1.6 -r1.7 *** IValueSerializer.java 13 Feb 2007 23:01:02 -0000 1.6 --- IValueSerializer.java 12 Apr 2007 23:59:34 -0000 1.7 *************** *** 48,53 **** package com.bigdata.objndx; ! import java.io.DataInputStream; ! import java.io.DataOutputStream; import java.io.IOException; import java.io.Serializable; --- 48,53 ---- package com.bigdata.objndx; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.IOException; import java.io.Serializable; *************** *** 74,78 **** * written on the array. */ ! public void getValues(DataInputStream is, Object[] values, int nvals) throws IOException; --- 74,78 ---- * written on the array. */ ! public void getValues(DataInput is, Object[] values, int nvals) throws IOException; *************** *** 88,92 **** * [0:n-1] are defined and must be written. */ ! public void putValues(DataOutputStream os, Object[] values, int nvals) throws IOException; --- 88,92 ---- * [0:n-1] are defined and must be written. */ ! 
public void putValues(DataOutputBuffer os, Object[] values, int nvals) throws IOException; Index: ByteArrayValueSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/ByteArrayValueSerializer.java,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** ByteArrayValueSerializer.java 15 Feb 2007 01:34:22 -0000 1.1 --- ByteArrayValueSerializer.java 12 Apr 2007 23:59:34 -0000 1.2 *************** *** 48,52 **** import java.io.ByteArrayOutputStream; ! import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; --- 48,53 ---- import java.io.ByteArrayOutputStream; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; *************** *** 74,78 **** public static final transient int VERSION0 = 0x0; ! public void putValues(DataOutputStream os, Object[] values, int n) throws IOException { /* --- 75,79 ---- public static final transient int VERSION0 = 0x0; ! public void putValues(DataOutputBuffer os, Object[] values, int n) throws IOException { /* *************** *** 81,91 **** { ! final int size = 2 + n * 2; // est of buffer capacity. ! ByteArrayOutputStream baos = new ByteArrayOutputStream(size); ! ! DataOutputStream dbaos = new DataOutputStream(baos); ! ! LongPacker.packLong(dbaos,VERSION0); for (int i = 0; i < n; i++) { --- 82,94 ---- { ! // final int size = 2 + n * 2; // est of buffer capacity. ! // ! // ByteArrayOutputStream baos = new ByteArrayOutputStream(size); ! // ! // DataOutputStream dbaos = new DataOutputStream(baos); ! // ! // LongPacker.packLong(dbaos,VERSION0); ! os.packLong(VERSION0); for (int i = 0; i < n; i++) { *************** *** 97,107 **** // Note: we add (1) so that the length is always // non-negative so that we can pack it. ! LongPacker.packLong(dbaos,len+1); } ! dbaos.flush(); ! ! os.write(baos.toByteArray()); } --- 100,111 ---- // Note: we add (1) so that the length is always // non-negative so that we can pack it. ! // LongPacker.packLong(dbaos,len+1); ! os.packLong(len+1); } ! // dbaos.flush(); ! // ! // os.write(baos.toByteArray()); } *************** *** 124,128 **** } ! public void getValues(DataInputStream is, Object[] values, int n) throws IOException { final int version = (int)LongPacker.unpackLong(is); --- 128,132 ---- } ! public void getValues(DataInput is, Object[] values, int n) throws IOException { final int version = (int)LongPacker.unpackLong(is); *************** *** 159,163 **** if( value==null) continue; ! is.read(value, 0, value.length); } --- 163,167 ---- if( value==null) continue; ! is.readFully(value, 0, value.length); } Index: DataOutputBuffer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/DataOutputBuffer.java,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** DataOutputBuffer.java 10 Apr 2007 18:33:31 -0000 1.1 --- DataOutputBuffer.java 12 Apr 2007 23:59:34 -0000 1.2 *************** *** 162,168 **** /** ! * The #of bytes of data in the key. */ ! final public int getLength() { return len; --- 162,168 ---- /** ! * The current position in the buffer. */ ! final public int position() { return len; *************** *** 171,174 **** --- 171,194 ---- /** + * Set the position in the buffer. + * + * @param pos + * The new position, must be in [0:capacity). + * + * @return The old position. 
+ */ + final public int position(int pos) { + + if(pos<0 || pos>=buf.length) throw new IllegalArgumentException(); + + int v = this.len; + + this.len = pos; + + return v; + + } + + /** * Ensure that at least <i>len</i> bytes are free in the buffer. The * {@link #buf buffer} may be grown by this operation but it will not be *************** *** 234,238 **** protected int extend(int required) { ! return Math.max(required, buf.length * 2); } --- 254,263 ---- protected int extend(int required) { ! int capacity = Math.max(required, buf.length * 2); ! ! System.err.println("Extending buffer to capacity=" + capacity ! + " bytes."); ! ! return capacity; } *************** *** 451,460 **** /** ! * Note: This is not wildly efficient (it would be fine if ! * DataOutputStream#writeUTF(String str, DataOutput out)} was public) but ! * the use cases for serializing the nodes and leaves of a btree do not ! * suggest any requirement for Unicode (if you assume that the application ! * values are already being serialized as byte[]s - which is always true ! * when there is a client-server divide). */ public void writeUTF(String str) throws IOException { --- 476,488 ---- /** ! * @todo This is not wildly efficient (it would be fine if ! * DataOutputStream#writeUTF(String str, DataOutput out)} was public) ! * but the use cases for serializing the nodes and leaves of a btree ! * do not suggest any requirement for Unicode (if you assume that the ! * application values are already being serialized as byte[]s - which ! * is always true when there is a client-server divide). The RDF value ! * serializer does use this method right now, but that will be client ! * side code as soon we as refactor to isolate the client and the ! * server. */ public void writeUTF(String str) throws IOException { Index: IndexSegment.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/IndexSegment.java,v retrieving revision 1.20 retrieving revision 1.21 diff -C2 -d -r1.20 -r1.21 *** IndexSegment.java 10 Apr 2007 18:33:31 -0000 1.20 --- IndexSegment.java 12 Apr 2007 23:59:35 -0000 1.21 *************** *** 1,6 **** package com.bigdata.objndx; ! import java.io.DataInputStream; ! import java.io.DataOutputStream; import java.io.IOException; --- 1,6 ---- package com.bigdata.objndx; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.IOException; *************** *** 606,610 **** * to the conventions of this class. */ ! public void putChildAddresses(DataOutputStream os, long[] childAddr, int nchildren) throws IOException { --- 606,610 ---- * to the conventions of this class. */ ! public void putChildAddresses(DataOutputBuffer os, long[] childAddr, int nchildren) throws IOException { *************** *** 637,644 **** // write the adjusted offset (requires decoding). ! LongPacker.packLong(os, adjustedOffset); // write the #of bytes (does not require decoding). ! LongPacker.packLong(os, nbytes); } --- 637,646 ---- // write the adjusted offset (requires decoding). ! // LongPacker.packLong(os, adjustedOffset); ! os.packLong(adjustedOffset); // write the #of bytes (does not require decoding). ! // LongPacker.packLong(os, nbytes); ! os.packLong(nbytes); } *************** *** 649,653 **** * Unpacks and decodes the addresses. */ ! public void getChildAddresses(DataInputStream is, long[] childAddr, int nchildren) throws IOException { --- 651,655 ---- * Unpacks and decodes the addresses. */ ! 
public void getChildAddresses(DataInput is, long[] childAddr, int nchildren) throws IOException { Index: IKeySerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/IKeySerializer.java,v retrieving revision 1.7 retrieving revision 1.8 diff -C2 -d -r1.7 -r1.8 *** IKeySerializer.java 26 Jan 2007 02:39:24 -0000 1.7 --- IKeySerializer.java 12 Apr 2007 23:59:35 -0000 1.8 *************** *** 48,53 **** package com.bigdata.objndx; ! import java.io.DataInputStream; ! import java.io.DataOutputStream; import java.io.IOException; import java.io.Serializable; --- 48,53 ---- package com.bigdata.objndx; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.IOException; import java.io.Serializable; *************** *** 69,73 **** * @return The keys. */ ! public IKeyBuffer getKeys(DataInputStream is) throws IOException; /** --- 69,73 ---- * @return The keys. */ ! public IKeyBuffer getKeys(DataInput is) throws IOException; /** *************** *** 80,84 **** * The keys from a {@link Leaf} or {@link Node}. */ ! public void putKeys(DataOutputStream os, IKeyBuffer keys) throws IOException; --- 80,84 ---- * The keys from a {@link Leaf} or {@link Node}. */ ! public void putKeys(DataOutputBuffer os, IKeyBuffer keys) throws IOException; Index: AddressSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/AddressSerializer.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** AddressSerializer.java 5 Feb 2007 18:17:39 -0000 1.2 --- AddressSerializer.java 12 Apr 2007 23:59:34 -0000 1.3 *************** *** 48,53 **** package com.bigdata.objndx; ! import java.io.DataInputStream; ! import java.io.DataOutputStream; import java.io.IOException; --- 48,53 ---- package com.bigdata.objndx; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.IOException; *************** *** 57,60 **** --- 57,62 ---- * Serializes each address as an unpacked long integer. * + * @see PackedAddressSerializer + * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ *************** *** 75,79 **** } ! public void putChildAddresses(DataOutputStream os, long[] childAddr, int nchildren) throws IOException { --- 77,81 ---- } ! public void putChildAddresses(DataOutputBuffer os, long[] childAddr, int nchildren) throws IOException { *************** *** 98,102 **** } ! public void getChildAddresses(DataInputStream is, long[] childAddr, int nchildren) throws IOException { --- 100,104 ---- } ! public void getChildAddresses(DataInput is, long[] childAddr, int nchildren) throws IOException { Index: NodeSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/NodeSerializer.java,v retrieving revision 1.35 retrieving revision 1.36 diff -C2 -d -r1.35 -r1.36 *** NodeSerializer.java 10 Apr 2007 18:33:31 -0000 1.35 --- NodeSerializer.java 12 Apr 2007 23:59:34 -0000 1.36 *************** *** 47,51 **** package com.bigdata.objndx; - import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutput; --- 47,50 ---- *************** *** 103,106 **** --- 102,107 ---- * @version $Id$ * + * @todo modify deserialization to use a fast DataInput wrapping a byte[]? + * * @todo automatically resize the decompression buffers as required and start * with a smaller buffer. 
*************** *** 176,180 **** * retried. */ ! protected ByteBuffer _buf; /** --- 177,181 ---- * retried. */ ! protected DataOutputBuffer _buf; /** *************** *** 472,476 **** * @return The buffer. */ ! static protected ByteBuffer alloc(int capacity) { // return (true || capacity < Bytes.kilobyte32 * 8 )? ByteBuffer --- 473,477 ---- * @return The buffer. */ ! static protected DataOutputBuffer alloc(int capacity) { // return (true || capacity < Bytes.kilobyte32 * 8 )? ByteBuffer *************** *** 490,523 **** */ ! return ByteBuffer.allocate(capacity); ! } ! ! /** ! * Extends the internal buffer used to serialize nodes and leaves. ! * <p> ! * Note: large buffer requirements are not at all uncommon so we grow the ! * buffer rapidly to avoid multiple resizing and the expense of a too large ! * buffer. ! * ! * FIXME We can encapsulate the extension of the buffer within a class ! * derived from or using the {@link ByteBufferOutputStream} and simply copy ! * the data when we need to extend the buffer rather than restarting ! * serialization. This will make underestimates of the required buffer ! * capacity much less costly. ! */ ! protected void extendBuffer() { ! ! int capacity = _buf.capacity(); - capacity *= 2; - - System.err.println("Extending buffer to capacity=" + capacity - + " bytes."); - - _buf = alloc(capacity); - } /** * De-serialize a node or leaf. This method is used when the caller does not --- 491,526 ---- */ ! // return ByteBuffer.allocate(capacity); ! return new DataOutputBuffer(capacity); } + // /** + // * Extends the internal buffer used to serialize nodes and leaves. + // * <p> + // * Note: large buffer requirements are not at all uncommon so we grow the + // * buffer rapidly to avoid multiple resizing and the expense of a too large + // * buffer. + // * + // * FIXME We can encapsulate the extension of the buffer within a class + // * derived from or using the {@link ByteBufferOutputStream} and simply copy + // * the data when we need to extend the buffer rather than restarting + // * serialization. This will make underestimates of the required buffer + // * capacity much less costly. + // */ + // protected void extendBuffer() { + // + // int capacity = _buf.capacity(); + // + // capacity *= 2; + // + // System.err.println("Extending buffer to capacity=" + capacity + // + " bytes."); + // + // _buf = alloc(capacity); + // + // } + /** * De-serialize a node or leaf. This method is used when the caller does not *************** *** 658,678 **** } ! while (true) { ! try { return putNode(_buf, node); ! } catch (BufferOverflowException ex) { ! extendBuffer(); } ! ! } } ! private ByteBuffer putNode(ByteBuffer buf, INodeData node) { assert buf != null; --- 661,686 ---- } ! // while (true) { ! // try { return putNode(_buf, node); ! // ! } catch (IOException ex) { ! ! throw new RuntimeException(ex); // exception is not expected. ! } catch (BufferOverflowException ex) { ! throw ex; // exception is not expected. ! // extendBuffer(); } ! // ! // } } ! private ByteBuffer putNode(DataOutputBuffer buf, INodeData node) throws IOException { assert buf != null; *************** *** 690,727 **** */ ! buf.clear(); final int pos0 = buf.position(); // checksum ! buf.putInt(0); // will overwrite below with the checksum. // #bytes ! buf.putInt(0); // will overwrite below with the actual value. // nodeType ! buf.put(TYPE_NODE); // this is a non-leaf node. // version ! buf.putShort(VERSION0); ! /* ! * Setup output stream over the buffer. ! * ! 
* Note: I have tested the use of a {@link BufferedOutputStream} here ! * and in putLeaf() and it actually slows things down a smidge. ! */ ! DataOutputStream os = new DataOutputStream(// ! new ByteBufferOutputStream(buf) ! // new BufferedOutputStream(new ByteBufferOutputStream(buf)) ! ); try { // branching factor. ! LongPacker.packLong(os, branchingFactor); // #of spanned entries. ! LongPacker.packLong(os, nentries); // // #of keys --- 698,735 ---- */ ! buf.reset(); final int pos0 = buf.position(); // checksum ! buf.writeInt(0); // will overwrite below with the checksum. // #bytes ! buf.writeInt(0); // will overwrite below with the actual value. // nodeType ! buf.writeByte(TYPE_NODE); // this is a non-leaf node. // version ! buf.writeShort(VERSION0); ! // /* ! // * Setup output stream over the buffer. ! // * ! // * Note: I have tested the use of a {@link BufferedOutputStream} here ! // * and in putLeaf() and it actually slows things down a smidge. ! // */ ! // DataOutputStream os = new DataOutputStream(// ! // new ByteBufferOutputStream(buf) ! //// new BufferedOutputStream(new ByteBufferOutputStream(buf)) ! // ); try { // branching factor. ! buf.packLong( branchingFactor); // #of spanned entries. ! buf.packLong( nentries); // // #of keys *************** *** 729,742 **** // keys. ! keySerializer.putKeys(os, keys); // addresses. ! addrSerializer.putChildAddresses(os, childAddr, nkeys+1); // #of entries spanned per child. ! putChildEntryCounts(os,childEntryCounts,nkeys+1); ! // Done using the DataOutputStream so flush to the ByteBuffer. ! os.flush(); } --- 737,750 ---- // keys. ! keySerializer.putKeys(buf, keys); // addresses. ! addrSerializer.putChildAddresses(buf, childAddr, nkeys+1); // #of entries spanned per child. ! putChildEntryCounts(buf,childEntryCounts,nkeys+1); ! // // Done using the DataOutputStream so flush to the ByteBuffer. ! // os.flush(); } *************** *** 746,750 **** /* * Masquerade the EOFException as a buffer overflow since that is ! * what it really represents. */ RuntimeException ex2 = new BufferOverflowException(); --- 754,760 ---- /* * Masquerade the EOFException as a buffer overflow since that is ! * what it really represents (@todo since ByteBuffer is not used ! * anymore we do not need to masquerade this and the javadoc should ! * be updated). */ RuntimeException ex2 = new BufferOverflowException(); *************** *** 770,779 **** final int nbytes = buf.position() - pos0; assert nbytes > SIZEOF_NODE_HEADER; // patch #of bytes written on the record format. ! buf.putInt(pos0 + OFFSET_NBYTES, nbytes); // compute checksum for data written. ! final int checksum = useChecksum ? chk.checksum(buf, pos0 + SIZEOF_CHECKSUM, pos0 + nbytes) : 0; --- 780,791 ---- final int nbytes = buf.position() - pos0; assert nbytes > SIZEOF_NODE_HEADER; + + ByteBuffer buf2 = ByteBuffer.wrap(buf.buf,0,nbytes); // patch #of bytes written on the record format. ! buf2.putInt(pos0 + OFFSET_NBYTES, nbytes); // compute checksum for data written. ! final int checksum = useChecksum ? chk.checksum(buf2, pos0 + SIZEOF_CHECKSUM, pos0 + nbytes) : 0; *************** *** 781,791 **** // write the checksum into the buffer. ! buf.putInt(pos0, checksum); ! // flip the buffer to prepare for reading. ! buf.flip(); // optionally compresses the record. ! return compress( buf ); } --- 793,808 ---- // write the checksum into the buffer. ! buf2.putInt(pos0, checksum); ! /* ! * Note: The position will be zero(0). The limit will be the #of bytes ! * in the buffer. ! */ ! ! 
// // flip the buffer to prepare for reading. ! // buf2.flip(); // optionally compresses the record. ! return compress( buf2 ); } *************** *** 974,978 **** } ! while (true) { try { --- 991,995 ---- } ! // while (true) { try { *************** *** 980,994 **** return putLeaf(_buf,leaf); } catch (BufferOverflowException ex) { ! extendBuffer(); } ! } } ! private ByteBuffer putLeaf(ByteBuffer buf, ILeafData leaf) { assert buf != null; --- 997,1017 ---- return putLeaf(_buf,leaf); + } catch (IOException ex) { + + throw new RuntimeException(ex); // exception is not expected. + } catch (BufferOverflowException ex) { ! throw ex; // exception is not expected. ! ! // extendBuffer(); } ! // } } ! private ByteBuffer putLeaf(DataOutputBuffer buf, ILeafData leaf) throws IOException { assert buf != null; *************** *** 1000,1004 **** final Object[] vals = leaf.getValues(); ! buf.clear(); /* --- 1023,1027 ---- final Object[] vals = leaf.getValues(); ! buf.reset(); /* *************** *** 1008,1021 **** // checksum ! buf.putInt(0); // will overwrite below with the checksum. // nbytes ! buf.putInt(0); // will overwrite below with the actual value. // nodeType ! buf.put(TYPE_LEAF); // this is a leaf node. // version ! buf.putShort(VERSION0); /* --- 1031,1044 ---- // checksum ! buf.writeInt(0); // will overwrite below with the checksum. // nbytes ! buf.writeInt(0); // will overwrite below with the actual value. // nodeType ! buf.writeByte(TYPE_LEAF); // this is a leaf node. // version ! buf.writeShort(VERSION0); /* *************** *** 1024,1036 **** * Note: wrapping this with a BufferedOutputStream is slightly slower. */ ! DataOutputStream os = new DataOutputStream(// ! new ByteBufferOutputStream(buf) ! // new BufferedOutputStream(new ByteBufferOutputStream(buf)) ! ); try { // branching factor. ! LongPacker.packLong(os, branchingFactor); // // #of keys --- 1047,1059 ---- * Note: wrapping this with a BufferedOutputStream is slightly slower. */ ! // DataOutputStream os = new DataOutputStream(// ! // new ByteBufferOutputStream(buf) ! //// new BufferedOutputStream(new ByteBufferOutputStream(buf)) ! // ); try { // branching factor. ! buf.packLong( branchingFactor); // // #of keys *************** *** 1038,1048 **** // keys. ! keySerializer.putKeys(os, keys); // values. ! valueSerializer.putValues(os, vals, nkeys); ! // Done using the DataOutputStream so flush to the ByteBuffer. ! os.flush(); } --- 1061,1071 ---- // keys. ! keySerializer.putKeys(buf, keys); // values. ! valueSerializer.putValues(buf, vals, nkeys); ! // // Done using the DataOutputStream so flush to the ByteBuffer. ! // os.flush(); } *************** *** 1052,1056 **** /* * Masquerade the EOFException as a buffer overflow since that is ! * what it really represents. */ RuntimeException ex2 = new BufferOverflowException(); --- 1075,1080 ---- /* * Masquerade the EOFException as a buffer overflow since that is ! * what it really represents (@todo we do not need to masquerade ! * this exception since we are not using ByteBuffer anymore). */ RuntimeException ex2 = new BufferOverflowException(); *************** *** 1077,1099 **** assert nbytes > SIZEOF_LEAF_HEADER; // patch #of bytes written on the record format. ! buf.putInt(pos0 + OFFSET_NBYTES, nbytes); ! // compute checksum ! final int checksum = (useChecksum ? chk.checksum(buf, pos0 + SIZEOF_CHECKSUM, pos0 + nbytes) : 0); // System.err.println("computed leaf checksum: "+checksum); // write checksum on buffer. ! buf.putInt(pos0, checksum); ! /* ! * Flip the buffer to prepare it for reading. 
The position will be zero ! * and the limit will be the #of bytes in the serialized record. */ ! buf.flip(); // optionally compresses the record. ! return compress( buf ); } --- 1101,1130 ---- assert nbytes > SIZEOF_LEAF_HEADER; + ByteBuffer buf2 = ByteBuffer.wrap(buf.buf,0,nbytes); + // patch #of bytes written on the record format. ! buf2.putInt(pos0 + OFFSET_NBYTES, nbytes); ! // compute checksum. ! final int checksum = (useChecksum ? chk.checksum(buf2, pos0 + SIZEOF_CHECKSUM, pos0 + nbytes) : 0); // System.err.println("computed leaf checksum: "+checksum); // write checksum on buffer. ! buf2.putInt(pos0, checksum); ! /* ! * Note: The position will be zero(0). The limit will be the #of bytes ! * in the buffer. */ ! ! // /* ! // * Flip the buffer to prepare it for reading. The position will be zero ! // * and the limit will be the #of bytes in the serialized record. ! // */ ! // buf2.flip(); // optionally compresses the record. ! return compress( buf2 ); } *************** *** 1254,1257 **** --- 1285,1289 ---- * * @param os + * The output stream. * @param childEntryCounts * The #of entries spanned by each direct child. *************** *** 1260,1264 **** * @throws IOException */ ! protected void putChildEntryCounts(DataOutputStream os, int[] childEntryCounts, int nchildren) throws IOException { --- 1292,1296 ---- * @throws IOException */ ! protected void putChildEntryCounts(DataOutput os, int[] childEntryCounts, int nchildren) throws IOException { Index: PackedAddressSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/PackedAddressSerializer.java,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** PackedAddressSerializer.java 5 Feb 2007 18:17:38 -0000 1.3 --- PackedAddressSerializer.java 12 Apr 2007 23:59:34 -0000 1.4 *************** *** 48,53 **** package com.bigdata.objndx; ! import java.io.DataInputStream; ! import java.io.DataOutputStream; import java.io.IOException; --- 48,53 ---- package com.bigdata.objndx; ! import java.io.DataInput; ! import java.io.DataOutput; import java.io.IOException; *************** *** 78,82 **** } ! public void putChildAddresses(DataOutputStream os, long[] childAddr, int nchildren) throws IOException { --- 78,82 ---- } ! public void putChildAddresses(DataOutputBuffer os, long[] childAddr, int nchildren) throws IOException { *************** *** 101,105 **** } ! public void getChildAddresses(DataInputStream is, long[] childAddr, int nchildren) throws IOException { --- 101,105 ---- } ! public void getChildAddresses(DataInput is, long[] childAddr, int nchildren) throws IOException { Index: KeyBufferSerializer.java =================================================================== RCS file: /cvsroot/cweb/bigdata/src/java/com/bigdata/objndx/KeyBufferSerializer.java,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** KeyBufferSerializer.java 1 Feb 2007 16:00:36 -0000 1.2 --- KeyBufferSerializer.java 12 Apr 2007 23:59:34 -0000 1.3 *************** *** 2,10 **** import java.io.ByteArrayOutputStream; ! import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import org.CognitiveWeb.extser.LongPacker; /** --- 2,12 ---- import java.io.ByteArrayOutputStream; ! import java.io.DataInput; ! 
import java.io.DataOutput; import java.io.DataOutputStream; import java.io.IOException; import org.CognitiveWeb.extser.LongPacker; + import org.CognitiveWeb.extser.ShortPacker; /** *************** *** 25,35 **** private static final long serialVersionUID = 7361581167520945586L; ! public static final transient int VERSION0 = 0x0; public static final transient IKeySerializer INSTANCE = new KeyBufferSerializer(); ! public IKeyBuffer getKeys(DataInputStream is) throws IOException { ! final int version = is.readInt(); if (version != VERSION0) --- 27,37 ---- private static final long serialVersionUID = 7361581167520945586L; ! public static final transient short VERSION0 = 0x0; public static final transient IKeySerializer INSTANCE = new KeyBufferSerializer(); ! public IKeyBuffer getKeys(DataInput is) throws IOException { ! final short version = ShortPacker.unpackShort(is); if (version != VERSION0) *************** *** 72,78 **** } ! public void putKeys(DataOutputStream os, IKeyBuffer keys) throws IOException { ! os.writeInt(VERSION0); if(keys instanceof ImmutableKeyBuffer ) { --- 74,80 ---- } ! public void putKeys(DataOutputBuffer os, IKeyBuffer keys) throws IOException { ! os.packShort(VERSION0); if(keys instanceof ImmutableKeyBuffer ) { *************** *** 88,92 **** } ! protected void putKeys2(DataOutputStream os, ImmutableKeyBuffer keys) throws IOException { final int nkeys = keys.nkeys; --- 90,94 ---- } ! protected void putKeys2(DataOutputBuffer os, ImmutableKeyBuffer keys) throws IOException { final int nkeys = keys.nkeys; *************** *** 95,105 **** // #of keys in the node or leaf. ! LongPacker.packLong(os, nkeys); // maximum #of keys allowed in the node or leaf. ! LongPacker.packLong(os, keys.maxKeys); // length of the byte[] buffer containing the prefix and remainder for each key. ! LongPacker.packLong(os, bufferLength); /* --- 97,110 ---- // #of keys in the node or leaf. ! // LongPacker.packLong(os, nkeys); ! os.packLong(nkeys); // maximum #of keys allowed in the node or leaf. ! // LongPacker.packLong(os, keys.maxKeys); ! os.packLong(keys.maxKeys); // length of the byte[] buffer containing the prefix and remainder for each key. ! // LongPacker.packLong(os, bufferLength); ! os.packLong(bufferLength); /* *************** *** 114,118 **** int delta = offset - lastOffset; ! LongPacker.packLong(os, delta); lastOffset = offset; --- 119,124 ---- int delta = offset - lastOffset; ! // LongPacker.packLong(os, delta); ! os.packLong(delta); lastOffset = offset; *************** *** 120,124 **** } ! os.write(keys.buf); } --- 126,131 ---- } ! // os.write(keys.buf); ! os.write(keys.buf, 0, bufferLength); } *************** *** 134,138 **** * @throws IOException */ ! protected void putKeys2(DataOutputStream os, MutableKeyBuffer keys) throws IOException { final int nkeys = keys.nkeys; --- 141,145 ---- * @throws IOException */ ! protected void putKeys2(DataOutputBuffer os, MutableKeyBuffer keys) throws IOException { final int nkeys = keys.nkeys; *************** *** 160,175 **** // #of keys in the node or leaf. ! LongPacker.packLong(os, nkeys); // maximum #of keys allowed in the node or leaf. ! LongPacker.packLong(os, keys.getMaxKeys()); // length of the byte[] buffer containing the prefix and remainder for each key. ! LongPacker.packLong(os, bufferLength); /* * Write out deltas between offsets. * ! * FIXME this is 60% of the cost of this method. This is not pack long * so much as doing individual byte put operations on the output stream * (which is over a ByteBuffer). 
Just using a BAOS here doubles the --- 167,185 ---- // #of keys in the node or leaf. ! // LongPacker.packLong(os, nkeys); ! os.packLong(nkeys); // maximum #of keys allowed in the node or leaf. ! // LongPacker.packLong(os, keys.getMaxKeys()); ! os.packLong(keys.getMaxKeys()); // length of the byte[] buffer containing the prefix and remainder for each key. ! // LongPacker.packLong(os, bufferLength); ! os.packLong(bufferLength); /* * Write out deltas between offsets. * ! * Note: this is 60% of the cost of this method. This is not pack long * so much as doing individual byte put operations on the output stream * (which is over a ByteBuffer). Just using a BAOS here doubles the *************** *** 177,182 **** */ { ! ByteArrayOutputStream baos = new ByteArrayOutputStream(nkeys*8); ! DataOutputStream dbaos = new DataOutputStream(baos); int lastOffset = 0; --- 187,192 ---- */ { ! // ByteArrayOutputStream baos = new ByteArrayOutputStream(nkeys*8); ! // DataOutputStream dbaos = new DataOutputStream(baos); int lastOffset = 0; *************** *** 188,192 **** int delta = offset - lastOffset; ! LongPacker.packLong(dbaos, delta); lastOffset = offset; --- 198,203 ---- int delta = offset - lastOffset; ! // LongPacker.packLong(dbaos, delta); ! os.packLong(delta); lastOffset = offset; *************** *** 194,200 **** } ! dbaos.flush(); ! ! os.write(baos.toByteArray()); } --- 205,211 ---- } ! // dbaos.flush(); ! // ! // os.write(baos.toByteArray()); } |