|
From: <mrp...@us...> - 2014-01-27 16:56:05
|
Revision: 7833
http://bigdata.svn.sourceforge.net/bigdata/?rev=7833&view=rev
Author: mrpersonick
Date: 2014-01-27 16:55:56 +0000 (Mon, 27 Jan 2014)
Log Message:
-----------
No longer overloading the context position. Found and fixed most of the test cases that relied on that functionality; there might be a few more. Also removed the bigdata RDF/XML parser and writer.
Modified Paths:
--------------
branches/RDR/bigdata/src/resources/logging/log4j.properties
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/bnode/SidIV.java
branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/StatementBuffer.java
branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestAll.java
branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestStatementBuffer.java
branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/reif/rdr-01.ttl
branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/store/TestStatementIdentifiers.java
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/ProxyBigdataSailTestCase.java
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestProvenanceQuery.java
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSids.java
Added Paths:
-----------
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.ttl
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.ttl
Removed Paths:
-------------
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.rdf
branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.rdf
Modified: branches/RDR/bigdata/src/resources/logging/log4j.properties
===================================================================
--- branches/RDR/bigdata/src/resources/logging/log4j.properties 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata/src/resources/logging/log4j.properties 2014-01-27 16:55:56 UTC (rev 7833)
@@ -18,6 +18,8 @@
#log4j.logger.com.bigdata.rdf.rio.StatementBuffer=ALL
#log4j.logger.com.bigdata.rdf.sail.TestProvenanceQuery=ALL
+#log4j.logger.com.bigdata.rdf.sail.TestSids=ALL
+#log4j.logger.com.bigdata.rdf.sail.ProxyBigdataSailTestCase=ALL
# Test suite loggers.
#log4j.logger.junit=INFO
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/ServiceProviderHook.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -108,7 +108,7 @@
final RDFParserRegistry r = RDFParserRegistry.getInstance();
- r.add(new BigdataRDFXMLParserFactory());
+// r.add(new BigdataRDFXMLParserFactory());
// // Note: This ensures that the RDFFormat for NQuads is loaded.
// r.get(RDFFormat.NQUADS);
@@ -120,14 +120,14 @@
}
- // Ditto, but for the writer.
- {
- final RDFWriterRegistry r = RDFWriterRegistry.getInstance();
+// // Ditto, but for the writer.
+// {
+// final RDFWriterRegistry r = RDFWriterRegistry.getInstance();
+//
+// r.add(new BigdataRDFXMLWriterFactory());
+//
+// }
- r.add(new BigdataRDFXMLWriterFactory());
-
- }
-
// {
// final PropertiesParserRegistry r = PropertiesParserRegistry.getInstance();
//
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/bnode/SidIV.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/bnode/SidIV.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/internal/impl/bnode/SidIV.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -198,6 +198,7 @@
final int signum = key.length > 0 ? 1 : 0;
final BigInteger bi = new BigInteger(signum, key);
return 's' + bi.toString();
+// return toString();
}
/**
Modified: branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/StatementBuffer.java
===================================================================
--- branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/StatementBuffer.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/java/com/bigdata/rdf/rio/StatementBuffer.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -29,7 +29,7 @@
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
+import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
@@ -406,7 +406,7 @@
/*
* Process deferred statements (NOP unless using statement identifiers).
*/
- processDeferredStatements();
+// processDeferredStatements();
// flush anything left in the buffer.
incrementalWrite();
@@ -418,272 +418,272 @@
}
- /**
- * Processes the {@link #deferredStmts deferred statements}.
- * <p>
- * When statement identifiers are enabled the processing of statements using
- * blank nodes in their subject or object position must be deferred until we
- * know whether or not the blank node is being used as a statement
- * identifier (blank nodes are not allowed in the predicate position by the
- * RDF data model). If the blank node is being used as a statement
- * identifier then its {@link IV} will be assigned based on
- * the {s,p,o} triple. If it is being used as a blank node, then the
- * {@link IV} is assigned using the blank node ID.
- * <p>
- * Deferred statements are processed as follows:
- * <ol>
- *
- * <li>Collect all deferred statements whose blank node bindings never show
- * up in the context position of a statement (
- * {@link BigdataBNode#getStatementIdentifier()} is <code>false</code>).
- * Those blank nodes are NOT statement identifiers so we insert them into
- * the lexicon and the insert the collected statements as well.</li>
- *
- * <li>The remaining deferred statements are processed in "cliques". Each
- * clique consists of all remaining deferred statements whose {s,p,o} have
- * become fully defined by virtue of a blank node becoming bound as a
- * statement identifier. A clique is collected by a full pass over the
- * remaining deferred statements. This process repeats until no statements
- * are identified (an empty clique or fixed point).</li>
- *
- * </ol>
- * If there are remaining deferred statements then they contain cycles. This
- * is an error and an exception is thrown.
- *
- * @todo on each {@link #flush()}, scan the deferred statements for those
- * which are fully determined (bnodes are flagged as statement
- * identifiers) to minimize the build up for long documents?
- */
- protected void processDeferredStatements() {
-
- if (!statementIdentifiers || deferredStmts == null
- || deferredStmts.isEmpty()) {
-
- // NOP.
-
- return;
-
- }
-
- if (log.isInfoEnabled())
- log.info("processing " + deferredStmts.size()
- + " deferred statements");
-
- /*
- * Need to flush the terms out to the dictionary or the reification
- * process will not work correctly.
- */
- incrementalWrite();
-
- try {
-
- // Note: temporary override - clear by finally{}.
- statementIdentifiers = false;
-
- // stage 0
- if (reifiedStmts != null) {
-
- for (Map.Entry<BigdataBNodeImpl, ReifiedStmt> e : reifiedStmts.entrySet()) {
-
- final BigdataBNodeImpl sid = e.getKey();
-
- final ReifiedStmt reifiedStmt = e.getValue();
-
- if (!reifiedStmt.isFullyBound(arity)) {
-
- log.warn("unfinished reified stmt: " + reifiedStmt);
-
- continue;
-
- }
-
- final BigdataStatement stmt = valueFactory.createStatement(
- reifiedStmt.getSubject(),
- reifiedStmt.getPredicate(),
- reifiedStmt.getObject(),
- reifiedStmt.getContext(),
- StatementEnum.Explicit);
-
- sid.setStatement(stmt);
-
- sid.setIV(new SidIV(new SPO(stmt)));
-
- if (log.isInfoEnabled()) {
- log.info("reified sid conversion: sid=" + sid + ", stmt=" + stmt);
- }
-
- }
-
- if (log.isInfoEnabled()) {
-
- for (BigdataBNodeImpl sid : reifiedStmts.keySet()) {
-
- log.info("sid: " + sid + ", iv=" + sid.getIV());
-
- }
-
- }
-
- }
-
- // stage 1.
- {
-
- final int nbefore = deferredStmts.size();
-
- int n = 0;
-
- final Iterator<BigdataStatement> itr = deferredStmts.iterator();
-
- while(itr.hasNext()) {
-
- final BigdataStatement stmt = itr.next();
-
- if (stmt.getSubject() instanceof BNode
- && ((BigdataBNode) stmt.getSubject()).isStatementIdentifier())
- continue;
-
- if (stmt.getObject() instanceof BNode
- && ((BigdataBNode) stmt.getObject()).isStatementIdentifier())
- continue;
-
- if(log.isDebugEnabled()) {
- log.debug("grounded: "+stmt);
- }
-
- if (stmt.getSubject() instanceof BNode)
- addTerm(stmt.getSubject());
-
- if (stmt.getObject() instanceof BNode)
- addTerm(stmt.getObject());
-
- // fully grounded so add to the buffer.
- add(stmt);
-
- // the statement has been handled.
- itr.remove();
-
- n++;
-
- }
-
- if (log.isInfoEnabled())
- log.info(""+ n
- + " out of "
- + nbefore
- + " deferred statements used only blank nodes (vs statement identifiers).");
-
- /*
- * Flush everything in the buffer so that the blank nodes that
- * are really blank nodes will have their term identifiers
- * assigned.
- */
-
- incrementalWrite();
-
- }
-
- // stage 2.
- if(!deferredStmts.isEmpty()) {
-
- int nrounds = 0;
-
- while(true) {
-
- nrounds++;
-
- final int nbefore = deferredStmts.size();
-
- final Iterator<BigdataStatement> itr = deferredStmts.iterator();
-
- while(itr.hasNext()) {
-
- final BigdataStatement stmt = itr.next();
-
- if (log.isDebugEnabled()) {
- log.debug(stmt.getSubject() + ", iv=" + stmt.s());
- }
-
- if (stmt.getSubject() instanceof BNode
- && ((BigdataBNode) stmt.getSubject()).isStatementIdentifier()
- && stmt.s() == null)
- continue;
-
- if (stmt.getObject() instanceof BNode
- && ((BigdataBNode) stmt.getObject()).isStatementIdentifier()
- && stmt.o() == null)
- continue;
-
- if (log.isDebugEnabled()) {
- log.debug("round="+nrounds+", grounded: "+stmt);
- }
-
- // fully grounded so add to the buffer.
- add(stmt);
-
- // deferred statement has been handled.
- itr.remove();
-
- }
-
- final int nafter = deferredStmts.size();
-
- if (log.isInfoEnabled())
- log.info("round=" + nrounds+" : #before="+nbefore+", #after="+nafter);
-
- if(nafter == nbefore) {
-
- if (log.isInfoEnabled())
- log.info("fixed point after " + nrounds
- + " rounds with " + nafter
- + " ungrounded statements");
-
- break;
-
- }
-
- /*
- * Flush the buffer so that we can obtain the statement
- * identifiers for all statements in this clique.
- */
-
- incrementalWrite();
-
- } // next clique.
-
- final int nremaining = deferredStmts.size();
-
- if (nremaining > 0) {
-
- if (log.isDebugEnabled()) {
-
- for (BigdataStatement s : deferredStmts) {
- log.debug("could not ground: " + s);
- }
-
- }
-
- throw new StatementCyclesException(
- "" + nremaining
- + " statements can not be grounded");
-
- }
-
-
- } // stage 2.
-
- } finally {
-
- // Note: restore flag!
- statementIdentifiers = true;
-
- deferredStmts = null;
-
- reifiedStmts = null;
-
- }
-
- }
+// /**
+// * Processes the {@link #deferredStmts deferred statements}.
+// * <p>
+// * When statement identifiers are enabled the processing of statements using
+// * blank nodes in their subject or object position must be deferred until we
+// * know whether or not the blank node is being used as a statement
+// * identifier (blank nodes are not allowed in the predicate position by the
+// * RDF data model). If the blank node is being used as a statement
+// * identifier then its {@link IV} will be assigned based on
+// * the {s,p,o} triple. If it is being used as a blank node, then the
+// * {@link IV} is assigned using the blank node ID.
+// * <p>
+// * Deferred statements are processed as follows:
+// * <ol>
+// *
+// * <li>Collect all deferred statements whose blank node bindings never show
+// * up in the context position of a statement (
+// * {@link BigdataBNode#getStatementIdentifier()} is <code>false</code>).
+// * Those blank nodes are NOT statement identifiers so we insert them into
+// * the lexicon and the insert the collected statements as well.</li>
+// *
+// * <li>The remaining deferred statements are processed in "cliques". Each
+// * clique consists of all remaining deferred statements whose {s,p,o} have
+// * become fully defined by virtue of a blank node becoming bound as a
+// * statement identifier. A clique is collected by a full pass over the
+// * remaining deferred statements. This process repeats until no statements
+// * are identified (an empty clique or fixed point).</li>
+// *
+// * </ol>
+// * If there are remaining deferred statements then they contain cycles. This
+// * is an error and an exception is thrown.
+// *
+// * @todo on each {@link #flush()}, scan the deferred statements for those
+// * which are fully determined (bnodes are flagged as statement
+// * identifiers) to minimize the build up for long documents?
+// */
+// protected void processDeferredStatements() {
+//
+// if (!statementIdentifiers || deferredStmts == null
+// || deferredStmts.isEmpty()) {
+//
+// // NOP.
+//
+// return;
+//
+// }
+//
+// if (log.isInfoEnabled())
+// log.info("processing " + deferredStmts.size()
+// + " deferred statements");
+//
+// /*
+// * Need to flush the terms out to the dictionary or the reification
+// * process will not work correctly.
+// */
+// incrementalWrite();
+//
+// try {
+//
+// // Note: temporary override - clear by finally{}.
+// statementIdentifiers = false;
+//
+// // stage 0
+// if (reifiedStmts != null) {
+//
+// for (Map.Entry<BigdataBNodeImpl, ReifiedStmt> e : reifiedStmts.entrySet()) {
+//
+// final BigdataBNodeImpl sid = e.getKey();
+//
+// final ReifiedStmt reifiedStmt = e.getValue();
+//
+// if (!reifiedStmt.isFullyBound(arity)) {
+//
+// log.warn("unfinished reified stmt: " + reifiedStmt);
+//
+// continue;
+//
+// }
+//
+// final BigdataStatement stmt = valueFactory.createStatement(
+// reifiedStmt.getSubject(),
+// reifiedStmt.getPredicate(),
+// reifiedStmt.getObject(),
+// reifiedStmt.getContext(),
+// StatementEnum.Explicit);
+//
+// sid.setStatement(stmt);
+//
+// sid.setIV(new SidIV(new SPO(stmt)));
+//
+// if (log.isInfoEnabled()) {
+// log.info("reified sid conversion: sid=" + sid + ", stmt=" + stmt);
+// }
+//
+// }
+//
+// if (log.isInfoEnabled()) {
+//
+// for (BigdataBNodeImpl sid : reifiedStmts.keySet()) {
+//
+// log.info("sid: " + sid + ", iv=" + sid.getIV());
+//
+// }
+//
+// }
+//
+// }
+//
+// // stage 1.
+// {
+//
+// final int nbefore = deferredStmts.size();
+//
+// int n = 0;
+//
+// final Iterator<BigdataStatement> itr = deferredStmts.iterator();
+//
+// while(itr.hasNext()) {
+//
+// final BigdataStatement stmt = itr.next();
+//
+// if (stmt.getSubject() instanceof BNode
+// && ((BigdataBNode) stmt.getSubject()).isStatementIdentifier())
+// continue;
+//
+// if (stmt.getObject() instanceof BNode
+// && ((BigdataBNode) stmt.getObject()).isStatementIdentifier())
+// continue;
+//
+// if(log.isDebugEnabled()) {
+// log.debug("grounded: "+stmt);
+// }
+//
+// if (stmt.getSubject() instanceof BNode)
+// addTerm(stmt.getSubject());
+//
+// if (stmt.getObject() instanceof BNode)
+// addTerm(stmt.getObject());
+//
+// // fully grounded so add to the buffer.
+// add(stmt);
+//
+// // the statement has been handled.
+// itr.remove();
+//
+// n++;
+//
+// }
+//
+// if (log.isInfoEnabled())
+// log.info(""+ n
+// + " out of "
+// + nbefore
+// + " deferred statements used only blank nodes (vs statement identifiers).");
+//
+// /*
+// * Flush everything in the buffer so that the blank nodes that
+// * are really blank nodes will have their term identifiers
+// * assigned.
+// */
+//
+// incrementalWrite();
+//
+// }
+//
+// // stage 2.
+// if(!deferredStmts.isEmpty()) {
+//
+// int nrounds = 0;
+//
+// while(true) {
+//
+// nrounds++;
+//
+// final int nbefore = deferredStmts.size();
+//
+// final Iterator<BigdataStatement> itr = deferredStmts.iterator();
+//
+// while(itr.hasNext()) {
+//
+// final BigdataStatement stmt = itr.next();
+//
+// if (log.isDebugEnabled()) {
+// log.debug(stmt.getSubject() + ", iv=" + stmt.s());
+// }
+//
+// if (stmt.getSubject() instanceof BNode
+// && ((BigdataBNode) stmt.getSubject()).isStatementIdentifier()
+// && stmt.s() == null)
+// continue;
+//
+// if (stmt.getObject() instanceof BNode
+// && ((BigdataBNode) stmt.getObject()).isStatementIdentifier()
+// && stmt.o() == null)
+// continue;
+//
+// if (log.isDebugEnabled()) {
+// log.debug("round="+nrounds+", grounded: "+stmt);
+// }
+//
+// // fully grounded so add to the buffer.
+// add(stmt);
+//
+// // deferred statement has been handled.
+// itr.remove();
+//
+// }
+//
+// final int nafter = deferredStmts.size();
+//
+// if (log.isInfoEnabled())
+// log.info("round=" + nrounds+" : #before="+nbefore+", #after="+nafter);
+//
+// if(nafter == nbefore) {
+//
+// if (log.isInfoEnabled())
+// log.info("fixed point after " + nrounds
+// + " rounds with " + nafter
+// + " ungrounded statements");
+//
+// break;
+//
+// }
+//
+// /*
+// * Flush the buffer so that we can obtain the statement
+// * identifiers for all statements in this clique.
+// */
+//
+// incrementalWrite();
+//
+// } // next clique.
+//
+// final int nremaining = deferredStmts.size();
+//
+// if (nremaining > 0) {
+//
+// if (log.isDebugEnabled()) {
+//
+// for (BigdataStatement s : deferredStmts) {
+// log.debug("could not ground: " + s);
+// }
+//
+// }
+//
+// throw new StatementCyclesException(
+// "" + nremaining
+// + " statements can not be grounded");
+//
+// }
+//
+//
+// } // stage 2.
+//
+// } finally {
+//
+// // Note: restore flag!
+// statementIdentifiers = true;
+//
+// deferredStmts = null;
+//
+// reifiedStmts = null;
+//
+// }
+//
+// }
/**
* Clears all buffered data, including the canonicalizing mapping for blank
@@ -770,13 +770,19 @@
*/
protected void incrementalWrite() {
+ /*
+ * Look for non-sid bnodes and add them to the values to be written
+ * to the database (if they haven't already been written).
+ */
if (bnodes != null) {
for (BigdataBNode bnode : bnodes.values()) {
+ // sid, skip
if (bnode.isStatementIdentifier())
continue;
+ // already written, skip
if (bnode.getIV() != null)
continue;
@@ -973,12 +979,6 @@
final BigdataStatement stmt = stmts[i];
- /*
- * Note: context position is not passed when statement identifiers
- * are in use since the statement identifier is assigned based on
- * the {s,p,o} triple.
- */
-
final SPO spo = new SPO(stmt);
if (log.isDebugEnabled())
@@ -995,15 +995,6 @@
}
/*
- * When true, we will be handling statement identifiers.
- *
- * Note: this is based on the flag on the database rather than the flag
- * on the StatementBuffer since the latter is temporarily overridden when
- * processing deferred statements.
- */
- final boolean sids = database.getStatementIdentifiers();
-
- /*
* Note: When handling statement identifiers, we clone tmp[] to avoid a
* side-effect on its order so that we can unify the assigned statement
* identifiers below.
@@ -1015,77 +1006,77 @@
// final long nwritten = writeSPOs(sids ? tmp.clone() : tmp, numStmts);
final long nwritten = writeSPOs(tmp.clone(), numStmts);
- if (sids) {
-
- /*
- * Unify each assigned statement identifier with the context
- * position on the corresponding statement.
- */
-
- for (int i = 0; i < numStmts; i++) {
-
- final SPO spo = tmp[i];
-
- final BigdataStatement stmt = stmts[i];
-
- // verify that the BigdataStatement and SPO are the same triple.
- assert stmt.s() == spo.s;
- assert stmt.p() == spo.p;
- assert stmt.o() == spo.o;
-
- final BigdataResource c = stmt.getContext();
-
- if (c == null)
- continue;
-
-// if (c instanceof URI) {
+// if (sids) {
//
-// throw new UnificationException(
-// "URI not permitted in context position when statement identifiers are enabled: "
-// + stmt);
+// /*
+// * Unify each assigned statement identifier with the context
+// * position on the corresponding statement.
+// */
+//
+// for (int i = 0; i < numStmts; i++) {
+//
+// final SPO spo = tmp[i];
+//
+// final BigdataStatement stmt = stmts[i];
+//
+// // verify that the BigdataStatement and SPO are the same triple.
+// assert stmt.s() == spo.s;
+// assert stmt.p() == spo.p;
+// assert stmt.o() == spo.o;
+//
+// final BigdataResource c = stmt.getContext();
+//
+// if (c == null)
+// continue;
+//
+//// if (c instanceof URI) {
+////
+//// throw new UnificationException(
+//// "URI not permitted in context position when statement identifiers are enabled: "
+//// + stmt);
+////
+//// }
+//
+// if( c instanceof BNode) {
+//
+// final IV sid = spo.getStatementIdentifier();
//
+// if(c.getIV() != null) {
+//
+// if (!sid.equals(c.getIV())) {
+//
+// throw new UnificationException(
+// "Can not unify blankNode "
+// + c
+// + "("
+// + c.getIV()
+// + ")"
+// + " in context position with statement identifier="
+// + sid + ": " + stmt + " (" + spo
+// + ")");
+//
+// }
+//
+// } else {
+//
+// // assign the statement identifier.
+// c.setIV(sid);
+//
+// if (log.isDebugEnabled()) {
+//
+// log.debug("Assigned statement identifier: " + c
+// + "=" + sid);
+//
+// }
+//
+// }
+//
// }
-
- if( c instanceof BNode) {
+//
+// }
+//
+// }
- final IV sid = spo.getStatementIdentifier();
-
- if(c.getIV() != null) {
-
- if (!sid.equals(c.getIV())) {
-
- throw new UnificationException(
- "Can not unify blankNode "
- + c
- + "("
- + c.getIV()
- + ")"
- + " in context position with statement identifier="
- + sid + ": " + stmt + " (" + spo
- + ")");
-
- }
-
- } else {
-
- // assign the statement identifier.
- c.setIV(sid);
-
- if (log.isDebugEnabled()) {
-
- log.debug("Assigned statement identifier: " + c
- + "=" + sid);
-
- }
-
- }
-
- }
-
- }
-
- }
-
// Copy the state of the isModified() flag
for (int i = 0; i < numStmts; i++) {
@@ -1346,6 +1337,10 @@
} else if (term instanceof BNode) {
+ /*
+ * Handle bnodes separately, in incrementalWrite().
+ */
+
// if (!statementIdentifiers) {
//
// numBNodes++;
@@ -1409,102 +1404,101 @@
final BigdataStatement stmt = valueFactory.createStatement(s, p, o, c, type);
- if (statementIdentifiers
- && ((s instanceof BNode && ((BigdataBNode) s).getStatement() == null)
-// ||
-// (o instanceof BNode && ((BigdataBNode) o).getStatement() == null)
- )) {
+ /*
+ * Specifically looking for reification syntax:
+ * _:sid rdf:type Statement .
+ * _:sid rdf:subject <S> .
+ * _:sid rdf:predicate <P> .
+ * _:sid rdf:object <O> .
+ */
+ if (statementIdentifiers && s instanceof BNode) {
+
+ if (equals(p, RDF_SUBJECT, RDF_PREDICATE, RDF_OBJECT)) {
+
+ final BigdataBNodeImpl sid = (BigdataBNodeImpl) s;
+
+ if (sid.getStatement() != null) {
- /*
- * When statement identifiers are enabled a statement with a
- * blank node in the subject or object position must be deferred
- * until the end of the source so that we determine whether it
- * is being used as a statement identifier or a blank node (if
- * the blank node occurs in the context position, then we know
- * that it is being used as a statement identifier).
- */
-
- if (//s instanceof BNode &&
- equals(p, RDF_SUBJECT, RDF_PREDICATE, RDF_OBJECT)) {
-
- final BigdataBNodeImpl sid = (BigdataBNodeImpl) s;
+ checkSid(sid, p, o);
+
+ log.warn("seeing a duplicate value for " + sid + ": " + p +"=" + o);
+
+ return;
+
+ }
+
+ if (reifiedStmts == null) {
+
+ reifiedStmts = new HashMap<BigdataBNodeImpl, ReifiedStmt>();
+
+ }
+
+ final ReifiedStmt reifiedStmt;
+ if (reifiedStmts.containsKey(sid)) {
+
+ reifiedStmt = reifiedStmts.get(sid);
+
+ } else {
+
+ reifiedStmt = new ReifiedStmt();
+
+ reifiedStmts.put(sid, reifiedStmt);
+
+ }
+
+ reifiedStmt.set(p, o);
+
+ if (log.isDebugEnabled())
+ log.debug("reified piece: "+stmt);
+
+ if (reifiedStmt.isFullyBound(arity)) {
+
+ sid.setStatement(reifiedStmt.toStatement(valueFactory));
+
+ reifiedStmts.remove(sid);
+
+ }
+
+ return;
+
+ } else if (equals(o, RDF_STATEMENT) && equals(p, RDF_TYPE)) {
- if (reifiedStmts == null) {
-
- reifiedStmts = new HashMap<BigdataBNodeImpl, ReifiedStmt>();
-
- }
+ /*
+ * Ignore these statements.
+ *
+ * _:sid rdf:type rdf:Statement .
+ */
+ return;
- final ReifiedStmt reifiedStmt;
- if (reifiedStmts.containsKey(sid)) {
-
- reifiedStmt = reifiedStmts.get(sid);
-
- } else {
-
- reifiedStmt = new ReifiedStmt();
-
- reifiedStmts.put(sid, reifiedStmt);
-
- }
-
- reifiedStmt.set(p, (BigdataValue) o);
-
- if (log.isDebugEnabled())
- log.debug("reified piece: "+stmt);
-
- if (reifiedStmt.isFullyBound(arity)) {
-
- sid.setStatement(reifiedStmt.toStatement(valueFactory));
-
- reifiedStmts.remove(sid);
-
- }
-
- return;
+ }
- }
-// else {
-//
-// if (deferredStmts == null) {
-//
-// deferredStmts = new HashSet<BigdataStatement>(stmts.length);
-//
-// }
-//
-// deferredStmts.add(stmt);
-//
-// if (log.isDebugEnabled())
-// log.debug("deferred: "+stmt);
-//
-// }
-//
-// } else {
-
}
- if (statementIdentifiers && s instanceof BNode &&
- equals(o, RDF_STATEMENT) && equals(p, RDF_TYPE)) {
-
- // ignore this statement
-
- return;
-
- }
-
- // add to the buffer.
- stmts[numStmts++] = stmt;
+ // add to the buffer.
+ stmts[numStmts++] = stmt;
+// if (c != null && statementIdentifiers && c instanceof BNode) {
+//
+// ((BigdataBNodeImpl) c).setStatement(stmt);
+//
// }
- if (c != null && statementIdentifiers && c instanceof BNode) {
-
- ((BigdataBNodeImpl) c).setStatement(stmt);
-
- }
-
}
+ private void checkSid(final BigdataBNode sid, final URI p, final Value o) {
+
+ final BigdataStatement stmt = sid.getStatement();
+
+ if ((p == RDF_SUBJECT && stmt.getSubject() != o) ||
+ (p == RDF_PREDICATE && stmt.getPredicate() != o) ||
+ (p == RDF_OBJECT && stmt.getObject() != o)) {
+
+ throw new UnificationException("sid cannot refer to multiple statements");
+
+ }
+
+ }
+
private boolean equals(final BigdataValue v1, final BigdataValue... v2) {
if (v2.length == 1) {
Modified: branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestAll.java
===================================================================
--- branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestAll.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestAll.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -67,14 +67,14 @@
*/
suite.addTestSuite(TestLoadAndVerify.class);
- /*
- * Correctness tests when SIDs are enabled and for blank node handling
- * using StatementBuffer and explicitly inserting specific triples (no
- * parsing). The RDF/XML interchange tests serialize the hand loaded
- * data and verify that it can be parsed and that the same graph is
- * obtained.
- */
- suite.addTestSuite(TestRDFXMLInterchangeWithStatementIdentifiers.class);
+// /*
+// * Correctness tests when SIDs are enabled and for blank node handling
+// * using StatementBuffer and explicitly inserting specific triples (no
+// * parsing). The RDF/XML interchange tests serialize the hand loaded
+// * data and verify that it can be parsed and that the same graph is
+// * obtained.
+// */
+// suite.addTestSuite(TestRDFXMLInterchangeWithStatementIdentifiers.class);
/*
* Test suite for "SIDS" support for NTRIPLES data. This test targets a
Modified: branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestStatementBuffer.java
===================================================================
--- branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestStatementBuffer.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/rio/TestStatementBuffer.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -135,7 +135,7 @@
buffer.handleStatement(s1, p1, o1, c1, StatementEnum.Explicit);
- assertEquals(7, buffer.numURIs);
+ assertEquals(8, buffer.numURIs);
assertEquals(0, buffer.numLiterals);
assertEquals(0, buffer.numBNodes);
assertEquals(1, buffer.numStmts);
@@ -151,7 +151,7 @@
buffer.handleStatement(s2, p2, o2, c2, StatementEnum.Explicit);
- assertEquals(8, buffer.numURIs); // only 4 since one is duplicate.
+ assertEquals(9, buffer.numURIs); // only 4 since one is duplicate.
assertEquals(1, buffer.numLiterals);
assertEquals(0, buffer.numBNodes);
assertEquals(2, buffer.numStmts);
@@ -167,7 +167,7 @@
buffer.handleStatement(s3, p3, o3, c3, StatementEnum.Explicit);
- assertEquals(8, buffer.numURIs);
+ assertEquals(9, buffer.numURIs);
assertEquals(1, buffer.numLiterals);
assertEquals(0, buffer.numBNodes);
assertEquals(3, buffer.numStmts);
@@ -178,7 +178,7 @@
buffer.handleStatement(s3, p3, o3, c3, StatementEnum.Explicit);
- assertEquals(8, buffer.numURIs);
+ assertEquals(9, buffer.numURIs);
assertEquals(1, buffer.numLiterals);
assertEquals(0, buffer.numBNodes);
assertEquals(4, buffer.numStmts);
Modified: branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/reif/rdr-01.ttl
===================================================================
--- branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/reif/rdr-01.ttl 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/reif/rdr-01.ttl 2014-01-27 16:55:56 UTC (rev 7833)
@@ -27,13 +27,6 @@
_:s1 dc:source <http://hr.example.com/employees#bob> ;
dc:created "2012-02-05T12:34:00Z"^^xsd:dateTime .
-_:s1 rdf:subject bd:alice .
-_:s1 rdf:predicate foaf:mbox .
-_:s1 rdf:object <mailto:alice@work> .
-_:s1 rdf:type rdf:Statement .
-_:s1 dc:source <http://hr.example.com/employees#bob> ;
- dc:created "2012-02-05T12:34:00Z"^^xsd:dateTime .
-
# Terse
#<<bd:alice foaf:knows bd:bob>>
# dc:source re:engine_1;
Modified: branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/store/TestStatementIdentifiers.java
===================================================================
--- branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/store/TestStatementIdentifiers.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-rdf/src/test/com/bigdata/rdf/store/TestStatementIdentifiers.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -297,50 +297,50 @@
}
- final BigdataStatementIterator itr = store.getStatements(null, null, null);
+// final BigdataStatementIterator itr = store.getStatements(null, null, null);
+//
+// try {
+//
+// final Writer w = new StringWriter();
+//
+//// RDFXMLWriter rdfWriter = new RDFXMLWriter(w);
+//
+// final RDFWriterFactory writerFactory = RDFWriterRegistry
+// .getInstance().get(RDFFormat.RDFXML);
+//
+// assertNotNull(writerFactory);
+//
+// if (!(writerFactory instanceof BigdataRDFXMLWriterFactory))
+// fail("Expecting " + BigdataRDFXMLWriterFactory.class + " not "
+// + writerFactory.getClass());
+//
+// final RDFWriter rdfWriter = writerFactory.getWriter(w);
+//
+// rdfWriter.startRDF();
+//
+// while(itr.hasNext()) {
+//
+// final Statement stmt = itr.next();
+//
+// rdfWriter.handleStatement(stmt);
+//
+// }
+//
+// rdfWriter.endRDF();
+//
+// if (log.isInfoEnabled())
+// log.info(w.toString());
+//
+// } catch(Exception ex) {
+//
+// throw new RuntimeException(ex);
+//
+// } finally {
+//
+// itr.close();
+//
+// }
- try {
-
- final Writer w = new StringWriter();
-
-// RDFXMLWriter rdfWriter = new RDFXMLWriter(w);
-
- final RDFWriterFactory writerFactory = RDFWriterRegistry
- .getInstance().get(RDFFormat.RDFXML);
-
- assertNotNull(writerFactory);
-
- if (!(writerFactory instanceof BigdataRDFXMLWriterFactory))
- fail("Expecting " + BigdataRDFXMLWriterFactory.class + " not "
- + writerFactory.getClass());
-
- final RDFWriter rdfWriter = writerFactory.getWriter(w);
-
- rdfWriter.startRDF();
-
- while(itr.hasNext()) {
-
- final Statement stmt = itr.next();
-
- rdfWriter.handleStatement(stmt);
-
- }
-
- rdfWriter.endRDF();
-
- if (log.isInfoEnabled())
- log.info(w.toString());
-
- } catch(Exception ex) {
-
- throw new RuntimeException(ex);
-
- } finally {
-
- itr.close();
-
- }
-
/*
* Verify after restart.
*/
@@ -768,7 +768,10 @@
StatementBuffer buf = new StatementBuffer(store, 100/* capacity */);
// statement about itself is a cycle.
- buf.add(sid1, rdfType, A, sid1);
+ buf.add(sid1, RDF.TYPE, A);
+ buf.add(sid1, RDF.SUBJECT, sid1);
+ buf.add(sid1, RDF.PREDICATE, RDF.TYPE);
+ buf.add(sid1, RDF.OBJECT, A);
/*
* Flush to the database, resolving statement identifiers as
@@ -830,16 +833,23 @@
{
StatementBuffer buf = new StatementBuffer(store, 100/* capacity */);
- // a cycle with a period of one.
- buf.add(sid2, rdfType, B, sid1);
- buf.add(sid1, rdfType, B, sid2);
-
/*
* Flush to the database, resolving statement identifiers as
* necessary.
*/
try {
+ // a cycle with a period of one.
+ buf.add(sid2, RDF.TYPE, B);
+ buf.add(sid1, RDF.SUBJECT, sid2);
+ buf.add(sid1, RDF.PREDICATE, RDF.TYPE);
+ buf.add(sid1, RDF.OBJECT, B);
+
+ buf.add(sid1, RDF.TYPE, B);
+ buf.add(sid2, RDF.SUBJECT, sid1);
+ buf.add(sid2, RDF.PREDICATE, RDF.TYPE);
+ buf.add(sid2, RDF.OBJECT, B);
+
buf.flush();
fail("Expecting: "+UnificationException.class);
@@ -888,16 +898,23 @@
StatementBuffer buf = new StatementBuffer(store, 100/* capacity */);
- // same blank node in both two distinct statement is an error.
- buf.add(A, rdfType, C, sid1);
- buf.add(B, rdfType, C, sid1);
-
/*
* Flush to the database, resolving statement identifiers as
* necessary.
*/
try {
+ // same blank node in both two distinct statement is an error.
+ buf.add(A, RDF.TYPE, C);
+ buf.add(sid1, RDF.SUBJECT, A);
+ buf.add(sid1, RDF.PREDICATE, RDF.TYPE);
+ buf.add(sid1, RDF.OBJECT, C);
+
+ buf.add(B, RDF.TYPE, C);
+ buf.add(sid1, RDF.SUBJECT, B);
+ buf.add(sid1, RDF.PREDICATE, RDF.TYPE);
+ buf.add(sid1, RDF.OBJECT, C);
+
buf.flush();
fail("Expecting: "+UnificationException.class);
Modified: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/ProxyBigdataSailTestCase.java
===================================================================
--- branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/ProxyBigdataSailTestCase.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/ProxyBigdataSailTestCase.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -37,6 +37,7 @@
import junit.extensions.proxy.IProxyTest;
import junit.framework.Test;
+import org.apache.log4j.Logger;
import org.openrdf.model.Resource;
import org.openrdf.query.Binding;
import org.openrdf.query.BindingSet;
@@ -68,6 +69,8 @@
implements IProxyTest
{
+// protected final transient static Logger log = Logger.getLogger(ProxyBigdataSailTestCase.class);
+
public ProxyBigdataSailTestCase() {}
public ProxyBigdataSailTestCase(String name){super(name);}
Modified: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestProvenanceQuery.java
===================================================================
--- branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestProvenanceQuery.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestProvenanceQuery.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -100,8 +100,8 @@
final DataLoader dataLoader = sail.database.getDataLoader();
dataLoader.loadData(
- "bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.rdf",
- ""/*baseURL*/, RDFFormat.RDFXML);
+ "bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.ttl",
+ ""/*baseURL*/, RDFFormat.TURTLE);
}
Modified: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSids.java
===================================================================
--- branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSids.java 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSids.java 2014-01-27 16:55:56 UTC (rev 7833)
@@ -28,12 +28,9 @@
import java.util.Properties;
import org.apache.log4j.Logger;
-import org.openrdf.model.Statement;
import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.query.Binding;
import org.openrdf.query.BindingSet;
import org.openrdf.query.QueryLanguage;
@@ -43,18 +40,11 @@
import org.openrdf.rio.RDFFormat;
import com.bigdata.rdf.axioms.NoAxioms;
+import com.bigdata.rdf.model.BigdataBNode;
import com.bigdata.rdf.model.BigdataStatement;
-import com.bigdata.rdf.model.BigdataURI;
import com.bigdata.rdf.model.BigdataValueFactory;
-import com.bigdata.rdf.spo.ISPO;
-import com.bigdata.rdf.store.AbstractTripleStore;
-import com.bigdata.rdf.store.BD;
-import com.bigdata.rdf.store.BigdataStatementIterator;
import com.bigdata.rdf.vocab.NoVocabulary;
-import com.bigdata.relation.accesspath.IAccessPath;
-import cutthecrap.utils.striterators.ICloseableIterator;
-
/**
* Test case for reverse lookup from SID to statement.
*
@@ -108,7 +98,7 @@
cxn.setAutoCommit(false);
- cxn.add(getClass().getResourceAsStream("sids.rdf"), "", RDFFormat.RDFXML);
+ cxn.add(getClass().getResourceAsStream("sids.ttl"), "", RDFFormat.TURTLE);
/*
* Note: The either flush() or commit() is required to flush the
@@ -132,9 +122,10 @@
"PREFIX myns: <http://mynamespace.com#> " +
"SELECT distinct ?s ?p ?o " +
" { " +
- " ?sid myns:creator <http://1.com> . " +
+ " <<"+(s == null ? "?s" : "<"+s+">")+" ?p ?o>> myns:creator <http://1.com> . " +
+// " ?sid myns:creator <http://1.com> . " +
// " graph ?sid { ?s ?p ?o } " +
- " graph ?sid { "+(s == null ? "?s" : "<"+s+">")+" ?p ?o } " +
+// " graph ?sid { "+(s == null ? "?s" : "<"+s+">")+" ?p ?o } " +
" }";
final TupleQuery tupleQuery =
@@ -206,7 +197,7 @@
cxn.setAutoCommit(false);
- final ValueFactory vf = sail.getValueFactory();
+ final BigdataValueFactory vf = (BigdataValueFactory) sail.getValueFactory();
final URI host1 = vf.createURI("http://localhost/host1");
final URI host = vf.createURI("http://domainnamespace.com/host#Host");
@@ -239,27 +230,33 @@
// cxn.add(swtch2, RDF.TYPE, swtch, sid5);
// cxn.add(sid5, creator, src2);
- final Statement s1 = vf.createStatement(host1, RDF.TYPE, host, vf.createBNode());
- final Statement s2 = vf.createStatement(host1, connectedTo, swtch1, vf.createBNode());
- final Statement s3 = vf.createStatement(host1, connectedTo, swtch2, vf.createBNode());
- final Statement s4 = vf.createStatement(swtch1, RDF.TYPE, swtch, vf.createBNode());
- final Statement s5 = vf.createStatement(swtch2, RDF.TYPE, swtch, vf.createBNode());
+ final BigdataStatement s1 = vf.createStatement(host1, RDF.TYPE, host, vf.createBNode());
+ final BigdataStatement s2 = vf.createStatement(host1, connectedTo, swtch1, vf.createBNode());
+ final BigdataStatement s3 = vf.createStatement(host1, connectedTo, swtch2, vf.createBNode());
+ final BigdataStatement s4 = vf.createStatement(swtch1, RDF.TYPE, swtch, vf.createBNode());
+ final BigdataStatement s5 = vf.createStatement(swtch2, RDF.TYPE, swtch, vf.createBNode());
+ final BigdataBNode sid1 = vf.createBNode(s1);
+ final BigdataBNode sid2 = vf.createBNode(s2);
+ final BigdataBNode sid3 = vf.createBNode(s3);
+ final BigdataBNode sid4 = vf.createBNode(s4);
+ final BigdataBNode sid5 = vf.createBNode(s5);
+
cxn.add(s1);
- cxn.add(s1.getContext(), creator, src1);
- cxn.add(s1.getContext(), creator, src2);
+ cxn.add(sid1, creator, src1);
+ cxn.add(sid1, creator, src2);
cxn.add(s2);
- cxn.add(s2.getContext(), creator, src1);
+ cxn.add(sid2, creator, src1);
cxn.add(s3);
- cxn.add(s3.getContext(), creator, src2);
+ cxn.add(sid3, creator, src2);
cxn.add(s4);
- cxn.add(s4.getContext(), creator, src1);
+ cxn.add(sid4, creator, src1);
cxn.add(s5);
- cxn.add(s5.getContext(), creator, src2);
+ cxn.add(sid5, creator, src2);
cxn.flush();//commit();
@@ -278,9 +275,10 @@
"PREFIX myns: <http://mynamespace.com#> " +
"SELECT distinct ?s ?p ?o " +
" { " +
- " ?sid myns:creator <http://1.com> . " +
-// " graph ?sid { ?s ?p ?o } " +
- " graph ?sid { "+(s == null ? "?s" : "<"+s+">")+" ?p ?o } " +
+ " <<"+(s == null ? "?s" : "<"+s+">")+" ?p ?o>> myns:creator <http://1.com> . " +
+// " ?sid myns:creator <http://1.com> . " +
+//// " graph ?sid { ?s ?p ?o } " +
+// " graph ?sid { "+(s == null ? "?s" : "<"+s+">")+" ?p ?o } " +
" }";
final TupleQuery tupleQuery =
Deleted: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.rdf
===================================================================
(Binary files differ)
Copied: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.ttl (from rev 7809, branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.rdf)
===================================================================
(Binary files differ)
Property changes on: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/provenance01.ttl
___________________________________________________________________
Added: svn:mime-type
+ application/octet-stream
Deleted: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.rdf
===================================================================
--- branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.rdf 2014-01-27 16:25:06 UTC (rev 7832)
+++ branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.rdf 2014-01-27 16:55:56 UTC (rev 7833)
@@ -1,82 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<rdf:RDF
-
- xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
-
- xmlns:bigdata="http://www.bigdata.com/rdf#">
-
-
-
-<rdf:Description rdf:about="http://localhost/host1">
-
- <rdf:type bigdata:sid="S195" bigdata:statementType="Explicit" rdf:resource="http://domainnamespace.com/host#Host"/>
-
- <connectedTo xmlns="http://mynamespace.com#" bigdata:sid="S199" bigdata:statementType="Explicit" rdf:resource="http://localhost/switch1"/>
-
- <connectedTo xmlns="http://mynamespace.com#" bigdata:sid="S227" bigdata:statementType="Explicit" rdf:resource="http://localhost/switch2"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:about="http://localhost/switch1">
-
- <rdf:type bigdata:sid="S203" bigdata:statementType="Explicit" rdf:resource="http://domainnamespace.com/san#Switch"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:nodeID="S195">
-
- <creator xmlns="http://mynamespace.com#" bigdata:sid="S211" bigdata:statementType="Explicit" rdf:resource="http://1.com"/>
-
- <creator xmlns="http://mynamespace.com#" bigdata:sid="S239" bigdata:statementType="Explicit" rdf:resource="http://2.com"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:nodeID="S199">
-
- <creator xmlns="http://mynamespace.com#" bigdata:sid="S215" bigdata:statementType="Explicit" rdf:resource="http://1.com"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:nodeID="S203">
-
- <creator xmlns="http://mynamespace.com#" bigdata:sid="S219" bigdata:statementType="Explicit" rdf:resource="http://1.com"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:about="http://localhost/switch2">
-
- <rdf:type bigdata:sid="S231" bigdata:statementType="Explicit" rdf:resource="http://domainnamespace.com/san#Switch"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:nodeID="S227">
-
- <creator xmlns="http://mynamespace.com#" bigdata:sid="S243" bigdata:statementType="Explicit" rdf:resource="http://2.com"/>
-
-</rdf:Description>
-
-
-
-<rdf:Description rdf:nodeID="S231">
-
- <creator xmlns="http://mynamespace.com#" bigdata:sid="S247" bigdata:statementType="Explicit" rdf:resource="http://2.com"/>
-
-</rdf:Description>
-
-
-
-</rdf:RDF>
-
Copied: branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.ttl (from rev 7809, branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.rdf)
===================================================================
--- branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.ttl (rev 0)
+++ branches/RDR/bigdata-sails/src/test/com/bigdata/rdf/sail/sids.ttl 2014-01-27 16:55:56 UTC (rev 7833)
@@ -0,0 +1,19 @@
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix bigdata: <http://www.bigdata.com/rdf#> .
+@prefix myns: <http://mynamespace.com#> .
+
+<http://localhost/host1> rdf:type <http://domainnamespace.com/host#Host> .
+<<<http://localhost/host1> rdf:type <http://domainnamespace.com/host#Host>>> myns:creator <http://1.com> .
+<<<http://localhost/host1> rdf:type <http://domainnamespace.com/host#Host>>> myns:creator <http://2.com> .
+
+<http://localhost/host1> myns:connectedTo <http://localhost/switch1> .
+<<<http://localhost/host1> myns:connectedTo <http://localhost/switch1>>> myns:creator <http://1.com> .
+
+<http://localhost/host1> myns:...
[truncated message content] |