|
From: <mrp...@us...> - 2010-09-22 23:37:57
|
Revision: 3614
http://bigdata.svn.sourceforge.net/bigdata/?rev=3614&view=rev
Author: mrpersonick
Date: 2010-09-22 23:37:50 +0000 (Wed, 22 Sep 2010)
Log Message:
-----------
adding Sesame to BOp conversion
Modified Paths:
--------------
branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IPredicate.java
branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/engine/Rule2BOpUtility.java
branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java
branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java
Modified: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IPredicate.java
===================================================================
--- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IPredicate.java 2010-09-22 23:36:12 UTC (rev 3613)
+++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IPredicate.java 2010-09-22 23:37:50 UTC (rev 3614)
@@ -403,4 +403,13 @@
*/
public int hashCode();
+ /**
+ * Sets the {@link com.bigdata.bop.BOp.Annotations#BOP_ID} annotation.
+ *
+ * @param bopId
+ * The bop id.
+ *
+ * @return The newly annotated {@link IPredicate}.
+ */
+ public IPredicate<E> setBOpId(int bopId);
}
Modified: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/engine/Rule2BOpUtility.java
===================================================================
--- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/engine/Rule2BOpUtility.java 2010-09-22 23:36:12 UTC (rev 3613)
+++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/engine/Rule2BOpUtility.java 2010-09-22 23:37:50 UTC (rev 3614)
@@ -27,26 +27,34 @@
package com.bigdata.bop.engine;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.log4j.Logger;
import com.bigdata.bop.BOp;
+import com.bigdata.bop.BOpContextBase;
+import com.bigdata.bop.BOpUtility;
import com.bigdata.bop.BindingSetPipelineOp;
+import com.bigdata.bop.IConstraint;
import com.bigdata.bop.IPredicate;
-import com.bigdata.bop.IVariableOrConstant;
+import com.bigdata.bop.IVariable;
import com.bigdata.bop.NV;
-import com.bigdata.bop.Var;
import com.bigdata.bop.ap.E;
import com.bigdata.bop.ap.Predicate;
-import com.bigdata.bop.bset.CopyBindingSetOp;
import com.bigdata.bop.bset.StartOp;
import com.bigdata.bop.join.PipelineJoin;
-import com.bigdata.journal.ITx;
import com.bigdata.rdf.sail.BigdataSail;
import com.bigdata.relation.rule.IProgram;
import com.bigdata.relation.rule.IRule;
import com.bigdata.relation.rule.IStep;
import com.bigdata.relation.rule.Program;
-import com.bigdata.relation.rule.Rule;
+import com.bigdata.relation.rule.eval.DefaultEvaluationPlan2;
+import com.bigdata.relation.rule.eval.IRangeCountFactory;
/**
* Utility class converts {@link IRule}s to {@link BOp}s.
@@ -63,6 +71,8 @@
*/
public class Rule2BOpUtility {
+ protected static final Logger log = Logger.getLogger(Rule2BOpUtility.class);
+
/**
* Convert an {@link IStep} into an operator tree. This should handle
* {@link IRule}s and {@link IProgram}s as they are currently implemented
@@ -73,12 +83,11 @@
*
* @return
*/
- public static BindingSetPipelineOp convert(final IStep step, final int startId) {
+ public static BindingSetPipelineOp convert(final IStep step,
+ final int startId, final QueryEngine queryEngine) {
- if (step instanceof Rule)
- return convert((Rule) step, startId);
- else if (step instanceof Program)
- return convert((Program) step);
+ if (step instanceof IRule)
+ return convert((IRule) step, startId, queryEngine);
throw new UnsupportedOperationException();
@@ -91,7 +100,8 @@
*
* @return
*/
- public static BindingSetPipelineOp convert(final Rule rule, final int startId) {
+ public static BindingSetPipelineOp convert(final IRule rule,
+ final int startId, final QueryEngine queryEngine) {
int bopId = startId;
@@ -100,98 +110,119 @@
new NV(Predicate.Annotations.BOP_ID, bopId++),//
}));
- Iterator<Predicate> tails = rule.getTail();
+ /*
+ * First put the tails in the correct order based on the logic in
+ * DefaultEvaluationPlan2.
+ */
+ final BOpContextBase context = new BOpContextBase(queryEngine);
+ final DefaultEvaluationPlan2 plan = new DefaultEvaluationPlan2(
+ new IRangeCountFactory() {
+ public long rangeCount(final IPredicate pred) {
+ return context.getRelation(pred).getAccessPath(pred)
+ .rangeCount(false);
+ }
+
+ }, rule);
+
+ final int[] order = plan.getOrder();
+
+ /*
+ * Map the constraints by the variables they use. This way, we can
+ * properly attach constraints to only the first tail in which the
+ * variable appears, so that we only run the appropriate constraint
+ * once, instead of for every tail.
+ */
+ final Map<IVariable<?>, Collection<IConstraint>> constraintsByVar =
+ new HashMap<IVariable<?>, Collection<IConstraint>>();
+ for (int i = 0; i < rule.getConstraintCount(); i++) {
+ final IConstraint c = rule.getConstraint(i);
+
+ if (log.isDebugEnabled()) {
+ log.debug(c);
+ }
+
+ final Set<IVariable<?>> uniqueVars = new HashSet<IVariable<?>>();
+ final Iterator<IVariable<?>> vars = BOpUtility.getSpannedVariables(c);
+ while (vars.hasNext()) {
+ final IVariable<?> v = vars.next();
+ uniqueVars.add(v);
+ }
+
+ for (IVariable<?> v : uniqueVars) {
+
+ if (log.isDebugEnabled()) {
+ log.debug(v);
+ }
+
+ Collection<IConstraint> constraints = constraintsByVar.get(v);
+ if (constraints == null) {
+ constraints = new LinkedList<IConstraint>();
+ constraintsByVar.put(v, constraints);
+ }
+ constraints.add(c);
+ }
+ }
+
BindingSetPipelineOp left = startOp;
- while (tails.hasNext()) {
-
+ for (int i = 0; i < order.length; i++) {
+
final int joinId = bopId++;
- final Predicate<?> pred = tails.next().setBOpId(bopId++);
+ // assign a bop id to the predicate
+ final IPredicate<?> pred = rule.getTail(order[i]).setBOpId(bopId++);
- System.err.println(pred);
+ /*
+ * Collect all the constraints for this predicate based on which
+ * variables make their first appearance in this tail
+ */
+ final Collection<IConstraint> constraints =
+ new LinkedList<IConstraint>();
+ /*
+ * Peek through the predicate's args to find its variables. Use
+ * these to attach constraints to the join based on the variables
+ * that make their first appearance in this tail.
+ */
+ for (BOp arg : pred.args()) {
+ if (arg instanceof IVariable) {
+ final IVariable<?> v = (IVariable) arg;
+ /*
+ * We do a remove because we don't ever need to run these
+ * constraints again during subsequent joins once they
+ * have been run once at the initial appearance of the
+ * variable.
+ *
+ * FIXME revisit this when we dynamically re-order running
+ * joins
+ */
+ if (constraintsByVar.containsKey(v))
+ constraints.addAll(constraintsByVar.remove(v));
+ }
+ }
+
final BindingSetPipelineOp joinOp = new PipelineJoin<E>(//
left, pred,//
NV.asMap(new NV[] {//
- new NV(Predicate.Annotations.BOP_ID, joinId),//
+ new NV(BOp.Annotations.BOP_ID, joinId),//
+ new NV(PipelineJoin.Annotations.CONSTRAINTS,
+ constraints.size() > 0 ?
+ constraints.toArray(new IConstraint[constraints.size()]) : null),//
+ new NV(PipelineJoin.Annotations.OPTIONAL, pred.isOptional()),//
}));
left = joinOp;
}
+ // just for now while I'm debugging
System.err.println(toString(left));
-// test_query_join2();
-
return left;
}
- public static void test_query_join2() {
-
- final String namespace = "ns";
- final int startId = 1;
- final int joinId1 = 2;
- final int predId1 = 3;
- final int joinId2 = 4;
- final int predId2 = 5;
-
- final BindingSetPipelineOp startOp = new StartOp(new BOp[] {},
- NV.asMap(new NV[] {//
- new NV(Predicate.Annotations.BOP_ID, startId),//
- }));
-
- final Predicate<?> pred1Op = new Predicate<E>(new IVariableOrConstant[] {
- Var.var("x"), Var.var("y") }, NV
- .asMap(new NV[] {//
- new NV(Predicate.Annotations.RELATION_NAME,
- new String[] { namespace }),//
- new NV(Predicate.Annotations.PARTITION_ID,
- Integer.valueOf(-1)),//
- new NV(Predicate.Annotations.OPTIONAL,
- Boolean.FALSE),//
- new NV(Predicate.Annotations.CONSTRAINT, null),//
- new NV(Predicate.Annotations.EXPANDER, null),//
- new NV(Predicate.Annotations.BOP_ID, predId1),//
- new NV(Predicate.Annotations.TIMESTAMP, ITx.READ_COMMITTED),//
- }));
-
- final Predicate<?> pred2Op = new Predicate<E>(new IVariableOrConstant[] {
- Var.var("y"), Var.var("z") }, NV
- .asMap(new NV[] {//
- new NV(Predicate.Annotations.RELATION_NAME,
- new String[] { namespace }),//
- new NV(Predicate.Annotations.PARTITION_ID,
- Integer.valueOf(-1)),//
- new NV(Predicate.Annotations.OPTIONAL,
- Boolean.FALSE),//
- new NV(Predicate.Annotations.CONSTRAINT, null),//
- new NV(Predicate.Annotations.EXPANDER, null),//
- new NV(Predicate.Annotations.BOP_ID, predId2),//
- new NV(Predicate.Annotations.TIMESTAMP, ITx.READ_COMMITTED),//
- }));
-
- final BindingSetPipelineOp join1Op = new PipelineJoin<E>(//
- startOp, pred1Op,//
- NV.asMap(new NV[] {//
- new NV(Predicate.Annotations.BOP_ID, joinId1),//
- }));
-
- final BindingSetPipelineOp join2Op = new PipelineJoin<E>(//
- join1Op, pred2Op,//
- NV.asMap(new NV[] {//
- new NV(Predicate.Annotations.BOP_ID, joinId2),//
- }));
-
- final BindingSetPipelineOp query = join2Op;
-
- System.err.println(toString(query));
-
- }
-
private static String toString(BOp bop) {
StringBuilder sb = new StringBuilder();
@@ -218,6 +249,13 @@
for (BOp arg : args) {
toString(arg, sb, indent+4);
}
+ IConstraint[] constraints =
+ bop.getProperty(PipelineJoin.Annotations.CONSTRAINTS);
+ if (constraints != null) {
+ for (IConstraint c : constraints) {
+ toString(c, sb, indent+4);
+ }
+ }
}
}
@@ -228,6 +266,8 @@
* @param program
*
* @return
+ *
+ * FIXME What is the pattern for UNION?
*/
public static BindingSetPipelineOp convert(final Program program) {
Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java
===================================================================
--- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java 2010-09-22 23:36:12 UTC (rev 3613)
+++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java 2010-09-22 23:37:50 UTC (rev 3614)
@@ -592,6 +592,11 @@
IStep query = createNativeQuery(join);
if (query == null) {
+
+ if (log.isDebugEnabled()) {
+ log.debug("query == null");
+ }
+
return new EmptyIteration<BindingSet, QueryEvaluationException>();
}
@@ -1522,8 +1527,12 @@
result = com.bigdata.bop.Var.var(name);
} else {
final IV iv = val.getIV();
- if (iv == null)
+ if (iv == null) {
+ if (log.isDebugEnabled()) {
+ log.debug("null IV: " + val);
+ }
return null;
+ }
result = new Constant<IV>(iv);
}
return result;
@@ -1584,6 +1593,7 @@
if (log.isDebugEnabled()) {
log.debug("var: " + var);
log.debug("constant: " + constant);
+ log.debug("constant.getIV(): " + constant.getIV());
}
if (var == null || constant == null || constant.getIV() == null) {
if (log.isDebugEnabled()) {
@@ -1644,15 +1654,16 @@
final IStep step)
throws Exception {
+ final QueryEngine queryEngine = tripleSource.getSail().getQueryEngine();
+
final int startId = 1;
- final BindingSetPipelineOp query = Rule2BOpUtility.convert(step, startId);
+ final BindingSetPipelineOp query =
+ Rule2BOpUtility.convert(step, startId, queryEngine);
if (log.isInfoEnabled()) {
log.info(query);
}
- final QueryEngine queryEngine = tripleSource.getSail().getQueryEngine();
-
final UUID queryId = UUID.randomUUID();
final RunningQuery runningQuery = queryEngine.eval(queryId, query,
new LocalChunkMessage<IBindingSet>(queryEngine, queryId,
Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java
===================================================================
--- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java 2010-09-22 23:36:12 UTC (rev 3613)
+++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java 2010-09-22 23:37:50 UTC (rev 3614)
@@ -29,6 +29,7 @@
import java.util.Collection;
import java.util.LinkedList;
import java.util.Properties;
+import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.openrdf.model.Literal;
import org.openrdf.model.URI;
@@ -44,6 +45,7 @@
import org.openrdf.query.TupleQueryResult;
import org.openrdf.query.impl.BindingImpl;
import com.bigdata.rdf.axioms.NoAxioms;
+import com.bigdata.rdf.lexicon.LexiconRelation;
import com.bigdata.rdf.store.BD;
import com.bigdata.rdf.vocab.NoVocabulary;
@@ -137,27 +139,209 @@
"select * " +
"WHERE { " +
" ?s rdf:type ns:Person . " +
- " ?s ns:likes ns:RDF . " +
-// " ?s rdfs:label ?label . " +
+ " ?s ns:likes ?likes . " +
+ " ?s rdfs:label ?label . " +
"}";
final TupleQuery tupleQuery =
cxn.prepareTupleQuery(QueryLanguage.SPARQL, query);
TupleQueryResult result = tupleQuery.evaluate();
- while (result.hasNext()) {
- System.err.println(result.next());
- }
+// while (result.hasNext()) {
+// System.err.println(result.next());
+// }
Collection<BindingSet> solution = new LinkedList<BindingSet>();
solution.add(createBindingSet(new Binding[] {
new BindingImpl("s", mike),
-// new BindingImpl("likes", rdf),
-// new BindingImpl("label", l1)
+ new BindingImpl("likes", rdf),
+ new BindingImpl("label", l1)
}));
solution.add(createBindingSet(new Binding[] {
new BindingImpl("s", bryan),
-// new BindingImpl("likes", rdf),
+ new BindingImpl("likes", rdf),
+ new BindingImpl("label", l2)
+ }));
+
+ compare(result, solution);
+
+ }
+
+ } finally {
+ cxn.close();
+ sail.__tearDownUnitTest();
+ }
+
+ }
+
+ public void testSimpleConstraint() throws Exception {
+
+ final BigdataSail sail = getSail();
+ sail.initialize();
+ final BigdataSailRepository repo = new BigdataSailRepository(sail);
+ final BigdataSailRepositoryConnection cxn =
+ (BigdataSailRepositoryConnection) repo.getConnection();
+ cxn.setAutoCommit(false);
+
+ try {
+
+ final ValueFactory vf = sail.getValueFactory();
+
+ final String ns = BD.NAMESPACE;
+
+ URI jill = new URIImpl(ns+"Jill");
+ URI jane = new URIImpl(ns+"Jane");
+ URI person = new URIImpl(ns+"Person");
+ URI age = new URIImpl(ns+"age");
+ URI IQ = new URIImpl(ns+"IQ");
+ Literal l1 = new LiteralImpl("Jill");
+ Literal l2 = new LiteralImpl("Jane");
+ Literal age1 = vf.createLiteral(20);
+ Literal age2 = vf.createLiteral(30);
+ Literal IQ1 = vf.createLiteral(130);
+ Literal IQ2 = vf.createLiteral(140);
+/**/
+ cxn.setNamespace("ns", ns);
+
+ cxn.add(jill, RDF.TYPE, person);
+ cxn.add(jill, RDFS.LABEL, l1);
+ cxn.add(jill, age, age1);
+ cxn.add(jill, IQ, IQ1);
+ cxn.add(jane, RDF.TYPE, person);
+ cxn.add(jane, RDFS.LABEL, l2);
+ cxn.add(jane, age, age2);
+ cxn.add(jane, IQ, IQ2);
+
+ /*
+ * Note: Either flush() or commit() is required to flush the
+ * statement buffers to the database before executing any operations
+ * that go around the sail.
+ */
+ cxn.flush();//commit();
+ cxn.commit();//
+
+ if (log.isInfoEnabled()) {
+ log.info("\n" + sail.getDatabase().dumpStore());
+ }
+
+ {
+
+ String query =
+ "PREFIX rdf: <"+RDF.NAMESPACE+"> " +
+ "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " +
+ "PREFIX ns: <"+ns+"> " +
+
+ "select * " +
+ "WHERE { " +
+ " ?s rdf:type ns:Person . " +
+ " ?s ns:age ?age . " +
+ " ?s ns:IQ ?iq . " +
+ " ?s rdfs:label ?label . " +
+ " FILTER( ?age < 25 && ?iq > 125 ) . " +
+ "}";
+
+ final TupleQuery tupleQuery =
+ cxn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQueryResult result = tupleQuery.evaluate();
+
+// while (result.hasNext()) {
+// System.err.println(result.next());
+// }
+
+ Collection<BindingSet> solution = new LinkedList<BindingSet>();
+ solution.add(createBindingSet(new Binding[] {
+ new BindingImpl("s", jill),
+ new BindingImpl("age", age1),
+ new BindingImpl("iq", IQ1),
+ new BindingImpl("label", l1)
+ }));
+
+ compare(result, solution);
+
+ }
+
+ } finally {
+ cxn.close();
+ sail.__tearDownUnitTest();
+ }
+
+ }
+
+ public void testSimpleOptional() throws Exception {
+
+ final BigdataSail sail = getSail();
+ sail.initialize();
+ final BigdataSailRepository repo = new BigdataSailRepository(sail);
+ final BigdataSailRepositoryConnection cxn =
+ (BigdataSailRepositoryConnection) repo.getConnection();
+ cxn.setAutoCommit(false);
+
+ try {
+
+ final ValueFactory vf = sail.getValueFactory();
+
+ final String ns = BD.NAMESPACE;
+
+ URI mike = new URIImpl(ns+"Mike");
+ URI bryan = new URIImpl(ns+"Bryan");
+ URI person = new URIImpl(ns+"Person");
+ URI likes = new URIImpl(ns+"likes");
+ URI rdf = new URIImpl(ns+"RDF");
+ Literal l1 = new LiteralImpl("Mike");
+ Literal l2 = new LiteralImpl("Bryan");
+/**/
+ cxn.setNamespace("ns", ns);
+
+ cxn.add(mike, RDF.TYPE, person);
+ cxn.add(mike, likes, rdf);
+ cxn.add(mike, RDFS.LABEL, l1);
+ cxn.add(bryan, RDF.TYPE, person);
+ cxn.add(bryan, likes, rdf);
+// cxn.add(bryan, RDFS.LABEL, l2);
+
+ /*
+ * Note: Either flush() or commit() is required to flush the
+ * statement buffers to the database before executing any operations
+ * that go around the sail.
+ */
+ cxn.flush();//commit();
+ cxn.commit();//
+
+ if (log.isInfoEnabled()) {
+ log.info("\n" + sail.getDatabase().dumpStore());
+ }
+
+ {
+
+ String query =
+ "PREFIX rdf: <"+RDF.NAMESPACE+"> " +
+ "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " +
+ "PREFIX ns: <"+ns+"> " +
+
+ "select * " +
+ "WHERE { " +
+ " ?s rdf:type ns:Person . " +
+ " ?s ns:likes ?likes . " +
+ " OPTIONAL { ?s rdfs:label ?label . } " +
+ "}";
+
+ final TupleQuery tupleQuery =
+ cxn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQueryResult result = tupleQuery.evaluate();
+
+// while (result.hasNext()) {
+// System.err.println(result.next());
+// }
+
+ Collection<BindingSet> solution = new LinkedList<BindingSet>();
+ solution.add(createBindingSet(new Binding[] {
+ new BindingImpl("s", mike),
+ new BindingImpl("likes", rdf),
+ new BindingImpl("label", l1)
+ }));
+ solution.add(createBindingSet(new Binding[] {
+ new BindingImpl("s", bryan),
+ new BindingImpl("likes", rdf),
// new BindingImpl("label", l2)
}));
@@ -172,4 +356,96 @@
}
+ public void testOrEquals() throws Exception {
+
+ final BigdataSail sail = getSail();
+ sail.initialize();
+ final BigdataSailRepository repo = new BigdataSailRepository(sail);
+ final BigdataSailRepositoryConnection cxn =
+ (BigdataSailRepositoryConnection) repo.getConnection();
+ cxn.setAutoCommit(false);
+
+ try {
+
+ final ValueFactory vf = sail.getValueFactory();
+
+ final LexiconRelation lex = sail.getDatabase().getLexiconRelation();
+
+ final String ns = BD.NAMESPACE;
+
+ URI mike = new URIImpl(ns+"Mike");
+ URI bryan = new URIImpl(ns+"Bryan");
+ URI martyn = new URIImpl(ns+"Martyn");
+ URI person = new URIImpl(ns+"Person");
+ URI p = new URIImpl(ns+"p");
+ Literal l1 = new LiteralImpl("Mike");
+ Literal l2 = new LiteralImpl("Bryan");
+ Literal l3 = new LiteralImpl("Martyn");
+/**/
+ cxn.setNamespace("ns", ns);
+
+ cxn.add(mike, RDF.TYPE, person);
+ cxn.add(mike, RDFS.LABEL, l1);
+ cxn.add(bryan, RDF.TYPE, person);
+ cxn.add(bryan, RDFS.COMMENT, l2);
+ cxn.add(martyn, RDF.TYPE, person);
+ cxn.add(martyn, p, l3);
+
+ /*
+ * Note: Either flush() or commit() is required to flush the
+ * statement buffers to the database before executing any operations
+ * that go around the sail.
+ */
+ cxn.flush();//commit();
+ cxn.commit();//
+
+ if (log.isInfoEnabled()) {
+ log.info("\n" + sail.getDatabase().dumpStore());
+ }
+
+ {
+
+ String query =
+ "PREFIX rdf: <"+RDF.NAMESPACE+"> " +
+ "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " +
+ "PREFIX ns: <"+ns+"> " +
+
+ "select * " +
+ "WHERE { " +
+ " ?s rdf:type ns:Person . " +
+ " ?s ?p ?label . " +
+ " FILTER ( ?p = rdfs:label || ?p = rdfs:comment ) . " +
+ "}";
+
+ final TupleQuery tupleQuery =
+ cxn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQueryResult result = tupleQuery.evaluate();
+
+// while (result.hasNext()) {
+// System.err.println(result.next());
+// }
+
+ Collection<BindingSet> solution = new LinkedList<BindingSet>();
+ solution.add(createBindingSet(new Binding[] {
+ new BindingImpl("s", mike),
+ new BindingImpl("p", RDFS.LABEL),
+ new BindingImpl("label", l1)
+ }));
+ solution.add(createBindingSet(new Binding[] {
+ new BindingImpl("s", bryan),
+ new BindingImpl("p", RDFS.COMMENT),
+ new BindingImpl("label", l2)
+ }));
+
+ compare(result, solution);
+
+ }
+
+ } finally {
+ cxn.close();
+ sail.__tearDownUnitTest();
+ }
+
+ }
+
}
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|