From: <mrp...@us...> - 2014-09-02 15:01:47
Revision: 8635
http://sourceforge.net/p/bigdata/code/8635
Author: mrpersonick
Date: 2014-09-02 15:01:44 +0000 (Tue, 02 Sep 2014)
Log Message:
-----------
Ticket #714: Sesame 2.7. Committing Bryan's changes for inline VALUES.
Modified Paths:
--------------
branches/SESAME_2_7/bigdata/src/java/com/bigdata/bop/join/HashIndexOp.java
branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/BindingsClause.java
branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/CompiledSolutionSetStats.java
branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java
branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/AST2BOpUtility.java
branches/SESAME_2_7/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataComplexSparqlQueryTest.java
Modified: branches/SESAME_2_7/bigdata/src/java/com/bigdata/bop/join/HashIndexOp.java
===================================================================
--- branches/SESAME_2_7/bigdata/src/java/com/bigdata/bop/join/HashIndexOp.java 2014-09-02 14:36:42 UTC (rev 8634)
+++ branches/SESAME_2_7/bigdata/src/java/com/bigdata/bop/join/HashIndexOp.java 2014-09-02 15:01:44 UTC (rev 8635)
@@ -49,6 +49,7 @@
import com.bigdata.relation.accesspath.UnsyncLocalOutputBuffer;
import cutthecrap.utils.striterators.ICloseableIterator;
+import cutthecrap.utils.striterators.SingleValueIterator;
/**
* Operator builds a hash index from the source solutions. Once all source
@@ -100,6 +101,17 @@
*/
final String NAMED_SET_SOURCE_REF = "namedSetSourceRef";
+ /**
+ * An optional attribute specifying the <em>source</em> IBindingSet[]
+ * for the index build operation. Normally, the hash index is built from
+ * the solutions flowing through the pipeline. When this attribute is
+ * specified, the hash index is instead built from the solutions in the
+ * specified IBindingSet[]. Regardless, the solutions flowing through
+ * the pipeline are copied to the sink once the hash index has been
+ * built.
+ */
+ final String BINDING_SETS_SOURCE = "bindingSets";
+
}
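For reference, the selection rule that this new annotation participates in (implemented below in ChunkTask) can be restated as a one-liner. The helper below is a hypothetical paraphrase, not part of the commit:

    // Hypothetical paraphrase: the pipeline is the source only when neither
    // alternative source annotation is present on the operator.
    static boolean sourceIsPipeline(final PipelineOp op) {
        return op.getProperty(Annotations.NAMED_SET_SOURCE_REF) == null
                && op.getProperty(Annotations.BINDING_SETS_SOURCE) == null;
    }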
/**
@@ -138,20 +150,10 @@
BOp.Annotations.EVALUATION_CONTEXT + "="
+ getEvaluationContext());
}
-// if (getEvaluationContext() != BOpEvaluationContext.CONTROLLER) {
-// throw new IllegalArgumentException(
-// BOp.Annotations.EVALUATION_CONTEXT + "="
-// + getEvaluationContext());
-// }
-
- if (getMaxParallel() != 1) {
- /*
- * Parallel evaluation is not allowed. This operator writes on an
- * object that is not thread-safe for mutation.
- */
+ if (getEvaluationContext() != BOpEvaluationContext.CONTROLLER) {
throw new IllegalArgumentException(
- PipelineOp.Annotations.MAX_PARALLEL + "="
- + getMaxParallel());
+ BOp.Annotations.EVALUATION_CONTEXT + "="
+ + getEvaluationContext());
}
if (!isLastPassRequested()) {
@@ -224,11 +226,11 @@
}
- /**
- * Evaluates the subquery for each source binding set. If the controller
- * operator is interrupted, then the subqueries are cancelled. If a subquery
- * fails, then all subqueries are cancelled.
- */
+ /**
+ * Evaluates the subquery for each source binding set. If the controller
+ * operator is interrupted, then the subqueries are cancelled. If a subquery
+ * fails, then all subqueries are cancelled.
+ */
private static class ChunkTask implements Callable<Void> {
private final BOpContext<IBindingSet> context;
@@ -268,7 +270,7 @@
this.stats = ((NamedSolutionSetStats) context.getStats());
- // Metadata to identify the named solution set.
+ // Metadata to identify the target named solution set.
final INamedSolutionSetRef namedSetRef = (INamedSolutionSetRef) op
.getRequiredProperty(Annotations.NAMED_SET_REF);
@@ -312,8 +314,10 @@
}
// true iff we will build the index from the pipeline.
- this.sourceIsPipeline = null == op
- .getProperty(Annotations.NAMED_SET_SOURCE_REF);
+ this.sourceIsPipeline //
+ = (op.getProperty(Annotations.NAMED_SET_SOURCE_REF) == null)
+ && (op.getProperty(Annotations.BINDING_SETS_SOURCE) == null)
+ ;
}
@@ -390,7 +394,7 @@
src = context.getSource();
- } else {
+ } else if (op.getProperty(Annotations.NAMED_SET_SOURCE_REF) != null) {
/*
* Metadata to identify the optional *source* solution set. When
@@ -403,6 +407,22 @@
src = context.getAlternateSource(namedSetSourceRef);
+ } else if (op.getProperty(Annotations.BINDING_SETS_SOURCE) != null) {
+
+ /*
+ * The IBindingSet[] is directly given. Just wrap it up as an
+ * iterator. It will visit a single chunk of solutions.
+ */
+ final IBindingSet[] bindingSets = (IBindingSet[]) op
+ .getProperty(Annotations.BINDING_SETS_SOURCE);
+
+ src = new SingleValueIterator<IBindingSet[]>(bindingSets);
+
+ } else {
+
+ throw new UnsupportedOperationException(
+ "Source was not specified");
+
}
try {
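As context for the SingleValueIterator usage above, here is a minimal stand-in in plain Java. It illustrates the intended behavior only (it is not the actual cutthecrap.utils.striterators implementation): the iterator visits exactly one element, in this case the whole IBindingSet[] as a single chunk.

    import java.util.Iterator;
    import java.util.NoSuchElementException;

    // Visits exactly one value, then is exhausted.
    final class OneShotIterator<E> implements Iterator<E> {
        private E value; // nulled out after the single visit
        OneShotIterator(final E value) { this.value = value; }
        @Override public boolean hasNext() { return value != null; }
        @Override public E next() {
            if (value == null) throw new NoSuchElementException();
            final E tmp = value;
            value = null;
            return tmp;
        }
        @Override public void remove() { throw new UnsupportedOperationException(); }
    }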
Modified: branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/BindingsClause.java
===================================================================
--- branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/BindingsClause.java 2014-09-02 14:36:42 UTC (rev 8634)
+++ branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/BindingsClause.java 2014-09-02 15:01:44 UTC (rev 8635)
@@ -166,6 +166,7 @@
}
+ @Override
public String toString(final int indent) {
final LinkedHashSet<IVariable<?>> declaredVars = getDeclaredVariables();
Modified: branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/CompiledSolutionSetStats.java
===================================================================
--- branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/CompiledSolutionSetStats.java 2014-09-02 14:36:42 UTC (rev 8634)
+++ branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/CompiledSolutionSetStats.java 2014-09-02 15:01:44 UTC (rev 8635)
@@ -148,6 +148,7 @@
}
+ @Override
public String toString() {
final StringBuilder sb = new StringBuilder();
Modified: branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java
===================================================================
--- branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java 2014-09-02 14:36:42 UTC (rev 8634)
+++ branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StaticAnalysis.java 2014-09-02 15:01:44 UTC (rev 8635)
@@ -2125,7 +2125,51 @@
}
+ /**
+ * Return the join variables for a VALUES clause (embedded only - not
+ * top-level).
+ *
+ * @param bc The VALUES clause (a set of solutions).
+ * @param stats A static analysis of those solutions.
+ * @param vars The set into which the join variables are collected.
+ * @return The caller's vars set, with the join variables added.
+ */
+ public Set<IVariable<?>> getJoinVars(final BindingsClause bc,
+ final ISolutionSetStats stats,
+ final Set<IVariable<?>> vars) {
+ /*
+ * The variables which will be definitely bound based on the solutions
+ * in the VALUES clause.
+ *
+ * Note: The collection is not modifiable, so we copy it.
+ */
+ final Set<IVariable<?>> boundByBindingsClause = new LinkedHashSet<IVariable<?>>(
+ stats.getAlwaysBound());
+
+ /*
+ * The variables which are definitely bound on entry to the join group
+ * in which the VALUES clause appears.
+ */
+ final Set<IVariable<?>> incomingBindings = getDefinitelyIncomingBindings(
+ bc, new LinkedHashSet<IVariable<?>>());
+
+ /*
+ * This is only those variables which are bound on entry into the group
+ * in which the VALUES join appears *and* which are "must" bound
+ * variables projected by the VALUES.
+ *
+ * FIXME Is this the correct semantics? I followed the pattern for SERVICE.
+ */
+ boundByBindingsClause.retainAll(incomingBindings);
+
+ vars.addAll(boundByBindingsClause);
+
+ return vars;
+
+ }
+
+
/**
* Return the join variables for an INCLUDE of a pre-existing named solution
* set.
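To make the intersection semantics of getJoinVars() above concrete, a self-contained sketch with plain strings standing in for IVariable (all variable names are hypothetical):

    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.Set;

    public class JoinVarsDemo {
        public static void main(final String[] args) {
            // Variables always bound by the VALUES solutions.
            final Set<String> alwaysBound =
                    new LinkedHashSet<String>(Arrays.asList("isX", "b"));
            // Variables definitely bound on entry to the enclosing group.
            final Set<String> incoming =
                    new LinkedHashSet<String>(Arrays.asList("a", "isX"));
            // Join variables = the intersection. Copy first, since the
            // source collection may be unmodifiable.
            final Set<String> joinVars = new LinkedHashSet<String>(alwaysBound);
            joinVars.retainAll(incoming);
            System.out.println(joinVars); // prints [isX]
        }
    }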
Modified: branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/AST2BOpUtility.java
===================================================================
--- branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/AST2BOpUtility.java 2014-09-02 14:36:42 UTC (rev 8634)
+++ branches/SESAME_2_7/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/AST2BOpUtility.java 2014-09-02 15:01:44 UTC (rev 8635)
@@ -61,6 +61,7 @@
import com.bigdata.bop.join.JoinAnnotations;
import com.bigdata.bop.join.JoinTypeEnum;
import com.bigdata.bop.join.NestedLoopJoinOp;
+import com.bigdata.bop.join.SolutionSetHashJoinOp;
import com.bigdata.bop.paths.ArbitraryLengthPathOp;
import com.bigdata.bop.paths.ZeroLengthPathOp;
import com.bigdata.bop.rdf.join.ChunkedMaterializationOp;
@@ -90,7 +91,6 @@
import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement;
import com.bigdata.rdf.internal.constraints.InBOp;
import com.bigdata.rdf.internal.constraints.IsBoundBOp;
-import com.bigdata.rdf.internal.constraints.NowBOp;
import com.bigdata.rdf.internal.constraints.ProjectedConstraint;
import com.bigdata.rdf.internal.constraints.SPARQLConstraint;
import com.bigdata.rdf.internal.constraints.TryBeforeMaterializationConstraint;
@@ -154,6 +154,7 @@
import com.bigdata.rdf.spo.SPOPredicate;
import com.bigdata.rdf.store.AbstractTripleStore;
import com.bigdata.relation.accesspath.ElementFilter;
+import com.bigdata.striterator.Chunkerator;
import cutthecrap.utils.striterators.FilterBase;
import cutthecrap.utils.striterators.IFilter;
@@ -1469,14 +1470,184 @@
}
/**
- * TODO Grab the binding sets from the BindingsClause, attach them to
- * the query as a named subquery with a hash index, and then add a
- * named subquery include to the pipeline right here.
+ * This handles a VALUES clause. It grabs the binding sets from the
+ * BindingsClause, attaches them to the query as a named subquery with a
+ * hash index, and then adds a named subquery include to the pipeline right
+ * here.
+ * <p>
+ * The VALUES are interpreted using a solution set hash join. The "plan" for
+ * the hash join of the VALUES with the solutions flowing through the
+ * pipeline is: (a) we take the IBindingSet[] and use a {@link HashIndexOp}
+ * to generate the hash index; and (b) we use a
+ * {@link SolutionSetHashJoinOp} to join the solutions from the pipeline
+ * with those in the hash index. Both JVM and HTree versions of this plan
+ * are supported.
+ * <p>
+ * 1. {@link HashIndexOp} (JVM or HTree): Specify the IBindingSet[] as the
+ * source. When the HashIndexOp runs, it will build a hash index from the
+ * IBindingSet[].
+ * <p>
+ * Note: The join variables need to be set based on the known bound
+ * variables in the context where we will evaluate the solution set hash
+ * join (head of the sub-SELECT, OPTIONAL) and those that are bound by the
+ * solution set hash join.
+ * <p>
+ * Note: The static analysis code needs to examine the definitely and maybe
+ * produced bindings for the {@link BindingsClause}. See the
+ * {@link ISolutionSetStats} interface and
+ * {@link SolutionSetStatserator#get(IBindingSet[])} for a convenience
+ * method.
+ * <p>
+ * 2. {@link SolutionSetHashJoinOp} (JVM or HTree): Joins the solutions
+ * flowing into the sub-query or update with the solutions from the
+ * HashIndexOp. This will take each solution from the pipeline, probe the
+ * hash index for solutions specified by the VALUES clause, and then do a
+ * JOIN for each such solution that is discovered.
*/
private static PipelineOp addValues(PipelineOp left,
- final BindingsClause bc,
+ final BindingsClause bindingsClause,
final Set<IVariable<?>> doneSet, final AST2BOpContext ctx) {
+ // Convert solutions from VALUES clause to an IBindingSet[].
+ final IBindingSet[] bindingSets = BOpUtility.toArray(
+ new Chunkerator<IBindingSet>(bindingsClause.getBindingSets().iterator()),//
+ null/*stats*/
+ );
+
+ // Static analysis of the VALUES solutions.
+ final ISolutionSetStats bindingsClauseStats = SolutionSetStatserator
+ .get(bindingSets);
+
+ @SuppressWarnings("rawtypes")
+ final Map<IConstraint, Set<IVariable<IV>>> needsMaterialization = new LinkedHashMap<IConstraint, Set<IVariable<IV>>>();
+
+ /*
+ * BindingsClause is an IBindingsProducer, but it should also be
+ * an IJoinNode. That will let us attach constraints
+ * (getJoinConstraints()) and identify the join variables for the VALUES
+ * sub-plan (getJoinVars()).
+ */
+ final IConstraint[] joinConstraints = getJoinConstraints(
+ getJoinConstraints(bindingsClause), needsMaterialization);
+
+ /*
+ * Model the VALUES JOIN by building a hash index over the IBindingSet[]
+ * from the VALUES clause. Then use a solution set hash join to join the
+ * solutions flowing through the pipeline with those in the hash index.
+ */
+ final String solutionSetName = "--values-" + ctx.nextId(); // Unique name.
+
+ final Set<IVariable<?>> joinVarSet = ctx.sa.getJoinVars(bindingsClause,
+ bindingsClauseStats, new LinkedHashSet<IVariable<?>>());
+
+ @SuppressWarnings("rawtypes")
+ final IVariable[] joinVars = joinVarSet.toArray(new IVariable[0]);
+
+// if (joinVars.length == 0) {
+//
+// /*
+// * Note: If there are no join variables then the join will
+// * examine the full N x M cross product of solutions. That is
+// * very inefficient, so we are logging a warning.
+// */
+//
+// log.warn("No join variables: " + subqueryRoot);
+//
+// }
+
+ final INamedSolutionSetRef namedSolutionSet = NamedSolutionSetRefUtility.newInstance(
+ ctx.queryId, solutionSetName, joinVars);
+
+ // VALUES is not optional.
+ final JoinTypeEnum joinType = JoinTypeEnum.Normal;
+
+ // lastPass is required except for normal joins.
+ final boolean lastPass = false;
+
+ // true if we will release the HTree as soon as the join is done.
+ // Note: also requires lastPass.
+ final boolean release = lastPass;
+
+ // join can be pipelined unless last pass evaluation is required
+ final int maxParallel = lastPass ? 1
+ : ctx.maxParallelForSolutionSetHashJoin;
+
+ // Generate the hash index operator.
+ if(ctx.nativeHashJoins) {
+ left = applyQueryHints(new HTreeHashIndexOp(leftOrEmpty(left),//
+ new NV(BOp.Annotations.BOP_ID, ctx.nextId()),//
+ new NV(BOp.Annotations.EVALUATION_CONTEXT,
+ BOpEvaluationContext.CONTROLLER),//
+ new NV(PipelineOp.Annotations.MAX_PARALLEL, 1),// required for lastPass
+ new NV(PipelineOp.Annotations.LAST_PASS, true),// required
+ new NV(PipelineOp.Annotations.SHARED_STATE, true),// live stats.
new NV(HTreeHashIndexOp.Annotations.RELATION_NAME, new String[]{ctx.getLexiconNamespace()}),//
+ new NV(HTreeHashIndexOp.Annotations.JOIN_TYPE, joinType),//
+ new NV(HTreeHashIndexOp.Annotations.JOIN_VARS, joinVars),//
+ new NV(HTreeHashIndexOp.Annotations.CONSTRAINTS, joinConstraints),// Note: will be applied by the solution set hash join.
+// new NV(HTreeHashIndexOp.Annotations.SELECT, projectedVars),//
+ new NV(HTreeHashIndexOp.Annotations.BINDING_SETS_SOURCE, bindingSets),// source solutions from VALUES.
+ new NV(HTreeHashIndexOp.Annotations.NAMED_SET_REF, namedSolutionSet)// output named solution set.
+ ), bindingsClause, ctx);
+ } else {
+ left = applyQueryHints(new JVMHashIndexOp(leftOrEmpty(left),//
+ new NV(BOp.Annotations.BOP_ID, ctx.nextId()),//
+ new NV(BOp.Annotations.EVALUATION_CONTEXT,
+ BOpEvaluationContext.CONTROLLER),//
+ new NV(PipelineOp.Annotations.MAX_PARALLEL, 1),// required for lastPass
+ new NV(PipelineOp.Annotations.LAST_PASS, true),// required
+ new NV(PipelineOp.Annotations.SHARED_STATE, true),// live stats.
+ new NV(JVMHashIndexOp.Annotations.JOIN_TYPE, joinType),//
+ new NV(JVMHashIndexOp.Annotations.JOIN_VARS, joinVars),//
+ new NV(JVMHashIndexOp.Annotations.CONSTRAINTS, joinConstraints),// Note: will be applied by the solution set hash join.
+// new NV(HTreeHashIndexOp.Annotations.SELECT, projectedVars),//
+ new NV(HTreeHashIndexOp.Annotations.BINDING_SETS_SOURCE, bindingSets),// source solutions from VALUES.
+ new NV(JVMHashIndexOp.Annotations.NAMED_SET_REF, namedSolutionSet)// output named solution set.
+ ), bindingsClause, ctx);
+ }
+
+ // Generate the solution set hash join operator.
+ if(ctx.nativeHashJoins) {
+ left = applyQueryHints(new HTreeSolutionSetHashJoinOp(
+ leftOrEmpty(left),//
+ new NV(BOp.Annotations.BOP_ID, ctx.nextId()),//
+ new NV(BOp.Annotations.EVALUATION_CONTEXT,
+ BOpEvaluationContext.CONTROLLER),//
+ new NV(PipelineOp.Annotations.MAX_PARALLEL, maxParallel),//
+ new NV(PipelineOp.Annotations.SHARED_STATE, true),// live stats.
+// new NV(HTreeSolutionSetHashJoinOp.Annotations.OPTIONAL, optional),//
+// new NV(HTreeSolutionSetHashJoinOp.Annotations.JOIN_VARS, joinVars),//
+// new NV(HTreeSolutionSetHashJoinOp.Annotations.SELECT, null/*all*/),//
+// new NV(HTreeSolutionSetHashJoinOp.Annotations.CONSTRAINTS, joinConstraints),//
+ new NV(HTreeSolutionSetHashJoinOp.Annotations.RELEASE, release),//
+ new NV(HTreeSolutionSetHashJoinOp.Annotations.LAST_PASS, lastPass),//
+ new NV(HTreeSolutionSetHashJoinOp.Annotations.NAMED_SET_REF, namedSolutionSet)//
+ ), bindingsClause, ctx);
+ } else {
+ left = applyQueryHints(new JVMSolutionSetHashJoinOp(
+ leftOrEmpty(left),//
+ new NV(BOp.Annotations.BOP_ID, ctx.nextId()),//
+ new NV(BOp.Annotations.EVALUATION_CONTEXT,
+ BOpEvaluationContext.CONTROLLER),//
+ new NV(PipelineOp.Annotations.MAX_PARALLEL, maxParallel),//
+ new NV(PipelineOp.Annotations.SHARED_STATE, true),// live stats.
+// new NV(JVMSolutionSetHashJoinOp.Annotations.OPTIONAL, optional),//
+// new NV(JVMSolutionSetHashJoinOp.Annotations.JOIN_VARS, joinVars),//
+// new NV(JVMSolutionSetHashJoinOp.Annotations.SELECT, null/*all*/),//
+// new NV(JVMSolutionSetHashJoinOp.Annotations.CONSTRAINTS, joinConstraints),//
+ new NV(JVMSolutionSetHashJoinOp.Annotations.RELEASE, release),//
+ new NV(JVMSolutionSetHashJoinOp.Annotations.LAST_PASS, lastPass),//
+ new NV(JVMSolutionSetHashJoinOp.Annotations.NAMED_SET_REF, namedSolutionSet)//
+ ), bindingsClause, ctx);
+ }
+
+ /*
+ * For each filter which requires materialization steps, add the
+ * materializations steps to the pipeline and then add the filter to the
+ * pipeline.
+ */
+ left = addMaterializationSteps3(left, doneSet, needsMaterialization,
+ bindingsClause.getQueryHints(), ctx);
+
return left;
}
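The build/probe plan documented above reduces to the following toy model, in which Map<String, String> stands in for IBindingSet. This is a deliberate simplification: the real operators also handle chunking, join types, constraints, and the HTree vs. JVM index variants.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ValuesHashJoinSketch {

        // The join key is the sequence of bindings on the join variables.
        static List<String> key(final Map<String, String> solution,
                final List<String> joinVars) {
            final List<String> key = new ArrayList<String>();
            for (String v : joinVars) {
                key.add(solution.get(v));
            }
            return key;
        }

        public static void main(final String[] args) {
            final List<String> joinVars = Arrays.asList("isX");

            // (1) Build: hash the VALUES solutions on the join variables.
            final Map<String, String> valuesSolution =
                    Collections.singletonMap("isX", ":X");
            final Map<List<String>, List<Map<String, String>>> index =
                    new HashMap<List<String>, List<Map<String, String>>>();
            List<Map<String, String>> bucket =
                    index.get(key(valuesSolution, joinVars));
            if (bucket == null) {
                bucket = new ArrayList<Map<String, String>>();
                index.put(key(valuesSolution, joinVars), bucket);
            }
            bucket.add(valuesSolution);

            // (2) Probe: join each pipeline solution against the index.
            final Map<String, String> s1 = new HashMap<String, String>();
            s1.put("a", ":a1"); s1.put("isX", ":X");
            final Map<String, String> s2 = new HashMap<String, String>();
            s2.put("a", ":a2"); s2.put("isX", ":Y");
            for (Map<String, String> left : Arrays.asList(s1, s2)) {
                final List<Map<String, String>> hits =
                        index.get(key(left, joinVars));
                if (hits == null)
                    continue; // no match: dropped (Normal, i.e. non-optional, join)
                for (Map<String, String> right : hits) {
                    final Map<String, String> out =
                            new HashMap<String, String>(left);
                    out.putAll(right); // merge; consistent on the join key
                    System.out.println(out); // only {a=:a1, isX=:X} joins
                }
            }
        }
    }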
@@ -2672,7 +2843,7 @@
continue;
} else if (child instanceof BindingsClause) {
/*
- * VALUES clause
+ * FIXME Support VALUES clause
*/
left = addValues(left,
(BindingsClause) child, doneSet, ctx);
Modified: branches/SESAME_2_7/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataComplexSparqlQueryTest.java
===================================================================
--- branches/SESAME_2_7/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataComplexSparqlQueryTest.java 2014-09-02 14:36:42 UTC (rev 8634)
+++ branches/SESAME_2_7/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataComplexSparqlQueryTest.java 2014-09-02 15:01:44 UTC (rev 8635)
@@ -29,12 +29,17 @@
import java.io.IOException;
import java.io.InputStream;
-import java.io.InputStreamReader;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.junit.Test;
+import org.openrdf.model.Literal;
import org.openrdf.model.Resource;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.TupleQueryResult;
import org.openrdf.query.parser.sparql.ComplexSPARQLQueryTest;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryException;
@@ -42,11 +47,14 @@
import org.openrdf.rio.RDFParseException;
import com.bigdata.BigdataStatics;
+import com.bigdata.bop.BOpUtility;
import com.bigdata.journal.BufferMode;
import com.bigdata.journal.IIndexManager;
import com.bigdata.rdf.sail.BigdataSail;
import com.bigdata.rdf.sail.BigdataSail.Options;
import com.bigdata.rdf.sail.BigdataSailRepository;
+import com.bigdata.rdf.sail.BigdataSailTupleQuery;
+import com.bigdata.rdf.sparql.ast.eval.AST2BOpUtility;
/**
* Bigdata integration for the {@link ComplexSPARQLQueryTest}.
@@ -301,11 +309,55 @@
super.testSES1991RANDEvaluation();
}
+ /**
+ * This one is fixed now.
+ */
@Override
@Test
public void testValuesInOptional() throws Exception {
- if (runKnownBadTests)
super.testValuesInOptional();
}
+
+ /**
+ * TODO Write optimizer to pull this BindingsClause out of the join
+ * group and make it global.
+ */
+ public void testRequiredValues() throws Exception {
+ loadTestData("/testdata-query/dataset-ses1692.trig");
+ StringBuilder query = new StringBuilder();
+ query.append(" PREFIX : <http://example.org/>\n");
+ query.append(" SELECT DISTINCT ?a ?name ?isX WHERE { ?b :p1 ?a . ?a :name ?name. ?a a :X . VALUES(?isX) { (:X) } } ");
+ BigdataSailTupleQuery tq = (BigdataSailTupleQuery)
+ conn.prepareTupleQuery(QueryLanguage.SPARQL, query.toString());
+
+ if (logger.isInfoEnabled()) {
+ logger.info("optimized ast:\n"+tq.optimize());
+ logger.info("query plan:\n"+BOpUtility.toString(tq.getASTContainer().getQueryPlan()));
+ }
+
+ TupleQueryResult result = tq.evaluate();
+ assertNotNull(result);
+ assertTrue(result.hasNext());
+
+ int count = 0;
+ while (result.hasNext()) {
+ count++;
+ BindingSet bs = result.next();
+ System.out.println(bs);
+ URI a = (URI)bs.getValue("a");
+ assertNotNull(a);
+ Value isX = bs.getValue("isX");
+ Literal name = (Literal)bs.getValue("name");
+ assertNotNull(name);
+ if (a.stringValue().endsWith("a1")) {
+ assertNotNull(isX);
+ }
+ else if (a.stringValue().endsWith("a2")) {
+ assertNull(isX);
+ }
+ }
+ assertEquals(1, count);
+ }
+
}