From: <mrp...@us...> - 2011-01-11 04:44:31
Revision: 4073
          http://bigdata.svn.sourceforge.net/bigdata/?rev=4073&view=rev
Author:   mrpersonick
Date:     2011-01-11 04:44:22 +0000 (Tue, 11 Jan 2011)

Log Message:
-----------
working on nested optional groups

Modified Paths:
--------------
    branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IVariableOrConstant.java
    branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java
    branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedOptionals.java

Added Paths:
-----------
    branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IValueExpression.java
    branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/AND.java
    branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/BOUND.java
    branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/NOT.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTree.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java
    branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/UnsupportedOperatorException.java

Added: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IValueExpression.java
===================================================================
--- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IValueExpression.java    (rev 0)
+++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IValueExpression.java    2011-01-11 04:44:22 UTC (rev 4073)
@@ -0,0 +1,25 @@
+package com.bigdata.bop;
+
+import java.io.Serializable;
+
+public interface IValueExpression<E> extends BOp, Serializable {
+
+    /**
+     * Return the <i>as bound</i> value of the variable or constant. The <i>as
+     * bound</i> value of an {@link IConstant} is the contant's value. The <i>as
+     * bound</i> value of an {@link IVariable} is the bound value in the given
+     * {@link IBindingSet} -or- <code>null</code> if the variable is not bound
+     * in the {@link IBindingSet}.
+     *
+     * @param bindingSet
+     *            The binding set.
+     *
+     * @return The as bound value of the constant or variable.
+     *
+     * @throws IllegalArgumentException
+     *             if this is an {@link IVariable} and the <i>bindingSet</i> is
+     *             <code>null</code>.
+ */ + E get(IBindingSet bindingSet); + +} Modified: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IVariableOrConstant.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IVariableOrConstant.java 2011-01-11 01:37:17 UTC (rev 4072) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/IVariableOrConstant.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -37,7 +37,7 @@ * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ -public interface IVariableOrConstant<E> extends BOp, Serializable { +public interface IVariableOrConstant<E> extends IValueExpression<E> { /** * Return <code>true</code> iff this is a variable. @@ -64,24 +64,6 @@ E get(); /** - * Return the <i>as bound</i> value of the variable or constant. The <i>as - * bound</i> value of an {@link IConstant} is the contant's value. The <i>as - * bound</i> value of an {@link IVariable} is the bound value in the given - * {@link IBindingSet} -or- <code>null</code> if the variable is not bound - * in the {@link IBindingSet}. - * - * @param bindingSet - * The binding set. - * - * @return The as bound value of the constant or variable. - * - * @throws IllegalArgumentException - * if this is an {@link IVariable} and the <i>bindingSet</i> is - * <code>null</code>. - */ - E get(IBindingSet bindingSet); - - /** * Return the name of a variable. * * @throws UnsupportedOperationException Added: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/AND.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/AND.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/AND.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -0,0 +1,74 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.bop.constraint; + +import java.util.Map; + +import com.bigdata.bop.BOpBase; +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IConstraint; + +/** + * Imposes the constraint <code>x AND y</code>. + */ +public class AND extends BOpConstraint { + + /** + * + */ + private static final long serialVersionUID = -8146965892831895463L; + + /** + * Required deep copy constructor. + */ + public AND(final BOp[] args, final Map<String, Object> annotations) { + super(args, annotations); + } + + /** + * Required deep copy constructor. 
+ */ + public AND(final AND op) { + super(op); + } + + public AND(final IConstraint x, final IConstraint y) { + + super(new BOp[] { x, y }, null/*annocations*/); + + if (x == null || y == null) + throw new IllegalArgumentException(); + + } + + public boolean accept(final IBindingSet s) { + + return ((IConstraint) get(0)).accept(s) + && ((IConstraint) get(1)).accept(s); + + } + +} Added: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/BOUND.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/BOUND.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/BOUND.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -0,0 +1,73 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.bop.constraint; + +import java.util.Map; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IConstraint; +import com.bigdata.bop.IVariable; + +/** + * Imposes the constraint <code>bound(x)</code> for the variable x. + */ +public class BOUND extends BOpConstraint { + + /** + * + */ + private static final long serialVersionUID = -7408654639183330874L; + + /** + * Required deep copy constructor. + */ + public BOUND(final BOp[] args, final Map<String, Object> annotations) { + super(args, annotations); + } + + /** + * Required deep copy constructor. + */ + public BOUND(final BOUND op) { + super(op); + } + + public BOUND(final IVariable x) { + + super(new BOp[] { x }, null/*annocations*/); + + if (x == null) + throw new IllegalArgumentException(); + + } + + public boolean accept(final IBindingSet s) { + + return ((IVariable) get(0)).get(s) != null; + + } + +} Added: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/NOT.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/NOT.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/constraint/NOT.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -0,0 +1,73 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.bop.constraint; + +import java.util.Map; + +import com.bigdata.bop.BOpBase; +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IConstraint; + +/** + * Imposes the constraint <code>!x</code>. + */ +public class NOT extends BOpConstraint { + + /** + * + */ + private static final long serialVersionUID = -5701967329003122236L; + + /** + * Required deep copy constructor. + */ + public NOT(final BOp[] args, final Map<String, Object> annotations) { + super(args, annotations); + } + + /** + * Required deep copy constructor. + */ + public NOT(final NOT op) { + super(op); + } + + public NOT(final IConstraint x) { + + super(new BOp[] { x }, null/*annocations*/); + + if (x == null) + throw new IllegalArgumentException(); + + } + + public boolean accept(final IBindingSet s) { + + return !((IConstraint) get(0)).accept(s); + + } + +} Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java 2011-01-11 01:37:17 UTC (rev 4072) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -30,6 +30,7 @@ import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; import com.bigdata.bop.IVariableOrConstant; import com.bigdata.bop.NV; import com.bigdata.bop.PipelineOp; @@ -73,8 +74,8 @@ super(op); } - public CompareBOp(final IVariableOrConstant<IV> left, - final IVariableOrConstant<IV> right, final CompareOp op) { + public CompareBOp(final IValueExpression<IV> left, + final IValueExpression<IV> right, final CompareOp op) { super(new BOp[] { left, right }, NV.asMap(new NV(Annotations.OP, op))); @@ -85,8 +86,8 @@ public boolean accept(final IBindingSet s) { - final IV left = ((IVariableOrConstant<IV>) get(0)).get(s); - final IV right = ((IVariableOrConstant<IV>) get(1)).get(s); + final IV left = ((IValueExpression<IV>) get(0)).get(s); + final IV right = ((IValueExpression<IV>) get(1)).get(s); if (left == null || right == null) return true; // not yet bound. Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java 2011-01-11 01:37:17 UTC (rev 4072) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -27,18 +27,18 @@ import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; import com.bigdata.bop.IVariable; -import com.bigdata.bop.IVariableOrConstant; import com.bigdata.bop.ImmutableBOp; import com.bigdata.bop.NV; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.IVUtility; /** - * A math expression involving a left and right IVariableOrConstant operand. + * A math expression involving a left and right IValueExpression operand. 
*/ final public class MathBOp extends ImmutableBOp - implements IVariableOrConstant<IV> { + implements IValueExpression<IV> { /** * @@ -55,19 +55,6 @@ } - final public boolean isVar() { - - return ((IVariableOrConstant) get(0)).isVar() || - ((IVariableOrConstant) get(1)).isVar(); - - } - - final public boolean isConstant() { - - return !isVar(); - - } - /** * Required deep copy constructor. * @@ -79,8 +66,8 @@ } - public MathBOp(final IVariableOrConstant<IV> left, - final IVariableOrConstant<IV> right, final MathOp op) { + public MathBOp(final IValueExpression<IV> left, + final IValueExpression<IV> right, final MathOp op) { super(new BOp[] { left, right }, NV.asMap(new NV(Annotations.OP, op))); @@ -98,12 +85,12 @@ // // } - public IVariableOrConstant<IV> left() { - return (IVariableOrConstant<IV>) get(0); + public IValueExpression<IV> left() { + return (IValueExpression<IV>) get(0); } - public IVariableOrConstant<IV> right() { - return (IVariableOrConstant<IV>) get(1); + public IValueExpression<IV> right() { + return (IValueExpression<IV>) get(1); } public MathOp op() { @@ -133,13 +120,11 @@ } - final public boolean equals(final IVariableOrConstant<IV> o) { + final public boolean equals(final IValueExpression<IV> o) { if(!(o instanceof MathBOp)) { - // incomparable types. return false; - } return equals((MathBOp) o); @@ -176,18 +161,6 @@ } - final public IV get() { - - final IV left = left().get(); - final IV right = right().get(); - - if (left == null || right == null) - return null; - - return IVUtility.numericalMath(left, right, op()); - - } - final public IV get(final IBindingSet bindingSet) { final IV left = left().get(bindingSet); @@ -200,10 +173,4 @@ } - final public String getName() { - - throw new UnsupportedOperationException(); - - } - } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java 2011-01-11 01:37:17 UTC (rev 4072) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -1607,10 +1607,10 @@ if (iv == null) return null; iv1 = new Constant<IV>(iv); - } else if (left instanceof MathExpr) { - iv1 = generateMath((MathExpr) left); - if (iv1 == null) - return null; +// } else if (left instanceof MathExpr) { +// iv1 = generateMath((MathExpr) left); +// if (iv1 == null) +// return null; } else { return null; } @@ -1622,10 +1622,10 @@ if (iv == null) return null; iv2 = new Constant<IV>(iv); - } else if (right instanceof MathExpr) { - iv2 = generateMath((MathExpr) right); - if (iv2 == null) - return null; +// } else if (right instanceof MathExpr) { +// iv2 = generateMath((MathExpr) right); +// if (iv2 == null) +// return null; } else { return null; } @@ -1648,10 +1648,10 @@ if (iv == null) return null; iv1 = new Constant<IV>(iv); - } else if (left instanceof MathExpr) { - iv1 = generateMath((MathExpr) left); - if (iv1 == null) - return null; +// } else if (left instanceof MathExpr) { +// iv1 = generateMath((MathExpr) left); +// if (iv1 == null) +// return null; } else { return null; } @@ -1663,10 +1663,10 @@ if (iv == null) return null; iv2 = new Constant<IV>(iv); - } else if (right instanceof MathExpr) { - iv2 = generateMath((MathExpr) right); - if (iv2 == null) - return null; +// } else if (right instanceof MathExpr) { +// iv2 = 
generateMath((MathExpr) right); +// if (iv2 == null) +// return null; } else { return null; } Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-01-11 04:44:22 UTC (rev 4073) @@ -0,0 +1,2176 @@ +package com.bigdata.rdf.sail; + +import info.aduna.iteration.CloseableIteration; +import info.aduna.iteration.EmptyIteration; + +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.log4j.Logger; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.query.BindingSet; +import org.openrdf.query.Dataset; +import org.openrdf.query.QueryEvaluationException; +import org.openrdf.query.algebra.And; +import org.openrdf.query.algebra.Bound; +import org.openrdf.query.algebra.Compare; +import org.openrdf.query.algebra.Compare.CompareOp; +import org.openrdf.query.algebra.Filter; +import org.openrdf.query.algebra.Group; +import org.openrdf.query.algebra.Join; +import org.openrdf.query.algebra.LeftJoin; +import org.openrdf.query.algebra.MathExpr; +import org.openrdf.query.algebra.MathExpr.MathOp; +import org.openrdf.query.algebra.MultiProjection; +import org.openrdf.query.algebra.Not; +import org.openrdf.query.algebra.Or; +import org.openrdf.query.algebra.Order; +import org.openrdf.query.algebra.Projection; +import org.openrdf.query.algebra.ProjectionElem; +import org.openrdf.query.algebra.ProjectionElemList; +import org.openrdf.query.algebra.QueryModelNode; +import org.openrdf.query.algebra.QueryRoot; +import org.openrdf.query.algebra.SameTerm; +import org.openrdf.query.algebra.StatementPattern; +import org.openrdf.query.algebra.StatementPattern.Scope; +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.algebra.UnaryTupleOperator; +import org.openrdf.query.algebra.Union; +import org.openrdf.query.algebra.ValueConstant; +import org.openrdf.query.algebra.ValueExpr; +import org.openrdf.query.algebra.Var; +import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl; +import org.openrdf.query.algebra.evaluation.iterator.FilterIterator; +import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.Constant; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IConstant; +import com.bigdata.bop.IConstraint; +import com.bigdata.bop.IPredicate; +import com.bigdata.bop.IPredicate.Annotations; +import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; +import com.bigdata.bop.IVariableOrConstant; +import com.bigdata.bop.NV; +import com.bigdata.bop.PipelineOp; +import com.bigdata.bop.ap.Predicate; +import com.bigdata.bop.constraint.AND; +import com.bigdata.bop.constraint.BOUND; +import com.bigdata.bop.constraint.EQ; +import com.bigdata.bop.constraint.INBinarySearch; +import com.bigdata.bop.constraint.NE; +import com.bigdata.bop.constraint.NOT; +import com.bigdata.bop.constraint.OR; +import 
com.bigdata.bop.engine.IRunningQuery; +import com.bigdata.bop.engine.QueryEngine; +import com.bigdata.bop.solutions.ISortOrder; +import com.bigdata.btree.IRangeQuery; +import com.bigdata.btree.keys.IKeyBuilderFactory; +import com.bigdata.rdf.internal.DummyIV; +import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.constraints.CompareBOp; +import com.bigdata.rdf.internal.constraints.MathBOp; +import com.bigdata.rdf.lexicon.LexiconRelation; +import com.bigdata.rdf.model.BigdataValue; +import com.bigdata.rdf.sail.BigdataSail.Options; +import com.bigdata.rdf.sail.sop.SOp; +import com.bigdata.rdf.sail.sop.SOp2BOpUtility; +import com.bigdata.rdf.sail.sop.SOpTree; +import com.bigdata.rdf.sail.sop.SOpTree.SOpGroup; +import com.bigdata.rdf.sail.sop.SOpTreeBuilder; +import com.bigdata.rdf.sail.sop.UnsupportedOperatorException; +import com.bigdata.rdf.spo.DefaultGraphSolutionExpander; +import com.bigdata.rdf.spo.ExplicitSPOFilter; +import com.bigdata.rdf.spo.ISPO; +import com.bigdata.rdf.spo.NamedGraphSolutionExpander; +import com.bigdata.rdf.spo.SPOPredicate; +import com.bigdata.rdf.store.AbstractTripleStore; +import com.bigdata.rdf.store.BD; +import com.bigdata.rdf.store.BigdataBindingSetResolverator; +import com.bigdata.relation.accesspath.ElementFilter; +import com.bigdata.relation.accesspath.IAsynchronousIterator; +import com.bigdata.relation.accesspath.IBuffer; +import com.bigdata.relation.accesspath.IElementFilter; +import com.bigdata.relation.rule.IAccessPathExpander; +import com.bigdata.relation.rule.IProgram; +import com.bigdata.relation.rule.IQueryOptions; +import com.bigdata.relation.rule.IRule; +import com.bigdata.relation.rule.Rule; +import com.bigdata.relation.rule.eval.ISolution; +import com.bigdata.relation.rule.eval.RuleStats; +import com.bigdata.search.FullTextIndex; +import com.bigdata.search.IHit; +import com.bigdata.striterator.ChunkedWrappedIterator; +import com.bigdata.striterator.Dechunkerator; +import com.bigdata.striterator.DistinctFilter; +import com.bigdata.striterator.IChunkedOrderedIterator; + +/** + * Extended to rewrite Sesame {@link TupleExpr}s onto native {@link Rule}s and + * to evaluate magic predicates for full text search, etc. Query evaluation can + * proceed either by Sesame 2 evaluation or, if {@link Options#NATIVE_JOINS} is + * enabled, then by translation of Sesame 2 query expressions into native + * {@link IRule}s and native evaluation of those {@link IRule}s. + * + * <h2>Query options</h2> + * The following summarizes how various high-level query language feature are + * mapped onto native {@link IRule}s. + * <dl> + * <dt>DISTINCT</dt> + * <dd>{@link IQueryOptions#isDistinct()}, which is realized using + * {@link DistinctFilter}.</dd> + * <dt>ORDER BY</dt> + * <dd>{@link IQueryOptions#getOrderBy()} is effected by a custom + * {@link IKeyBuilderFactory} which generates sort keys that capture the desired + * sort order from the bindings in an {@link ISolution}. Unless DISTINCT is + * also specified, the generated sort keys are made unique by appending a one up + * long integer to the key - this prevents sort keys that otherwise compare as + * equals from dropping solutions. Note that the SORT is actually imposed by the + * {@link DistinctFilter} using an {@link IKeyBuilderFactory} assembled from the + * ORDER BY constraints. + * + * FIXME BryanT - implement the {@link IKeyBuilderFactory}. 
+ * + * FIXME MikeP - assemble the {@link ISortOrder}[] from the query and set on + * the {@link IQueryOptions}.</dd> + * <dt>OFFSET and LIMIT</dt> + * <dd> + * <p> + * {@link IQueryOptions#getSlice()}, which was effected as a conditional in + * the old "Nested Subquery With Join Threads Task" based on the + * {@link RuleStats#solutionCount}. Query {@link ISolution}s are counted as + * they are generated, but they are only entered into the {@link ISolution} + * {@link IBuffer} when the solutionCount is GE the OFFSET and LT the LIMIT. + * Query evaluation halts once the LIMIT is reached. + * </p> + * <p> + * Note that when DISTINCT and either LIMIT and/or OFFSET are specified + * together, then the LIMIT and OFFSET <strong>MUST</strong> be applied after + * the solutions have been generated since we may have to generate more than + * LIMIT solutions in order to have LIMIT <em>DISTINCT</em> solutions. We + * handle this for now by NOT translating the LIMIT and OFFSET onto the + * {@link IRule} and instead let Sesame close the iterator once it has enough + * solutions. + * </p> + * <p> + * Note that LIMIT and SLICE requires an evaluation plan that provides stable + * results. For a simple query this is achieved by setting + * {@link IQueryOptions#isStable()} to <code>true</code>. + * <p> + * For a UNION query, you must also set {@link IProgram#isParallel()} to + * <code>false</code> to prevent parallelized execution of the {@link IRule}s + * in the {@link IProgram}. + * </p> + * </dd> + * <dt>UNION</dt> + * <dd>A UNION is translated into an {@link IProgram} consisting of one + * {@link IRule} for each clause in the UNION. + * + * FIXME MikeP - implement.</dd> + * </dl> + * <h2>Filters</h2> + * The following provides a summary of how various kinds of FILTER are handled. + * A filter that is not explicitly handled is left untranslated and will be + * applied by Sesame against the generated {@link ISolution}s. + * <p> + * Whenever possible, a FILTER is translated into an {@link IConstraint} on an + * {@link IPredicate} in the generated native {@link IRule}. Some filters are + * essentially JOINs against the {@link LexiconRelation}. Those can be handled + * either as JOINs (generating an additional {@link IPredicate} in the + * {@link IRule}) or as an {@link INBinarySearch} constraint, where the inclusion set is + * pre-populated by some operation on the {@link LexiconRelation}. + * <dl> + * <dt>EQ</dt> + * <dd>Translated into an {@link EQ} constraint on an {@link IPredicate}.</dd> + * <dt>NE</dt> + * <dd>Translated into an {@link NE} constraint on an {@link IPredicate}.</dd> + * <dt>IN</dt> + * <dd>Translated into an {@link INBinarySearch} constraint on an {@link IPredicate}.</dd> + * <dt>OR</dt> + * <dd>Translated into an {@link OR} constraint on an {@link IPredicate}.</dd> + * <dt></dt> + * <dd></dd> + * </dl> + * <h2>Magic predicates</h2> + * <p> + * {@link BD#SEARCH} is the only magic predicate at this time. When the object + * position is bound to a constant, the magic predicate is evaluated once and + * the result is used to generate a set of term identifiers that are matches for + * the token(s) extracted from the {@link Literal} in the object position. Those + * term identifiers are then used to populate an {@link INBinarySearch} constraint. The + * object position in the {@link BD#SEARCH} MUST be bound to a constant. + * </p> + * + * FIXME We are not in fact rewriting the query operation at all, simply + * choosing a different evaluation path as we go. 
The rewrite should really be + * isolated from the execution, e.g., in its own class. That more correct + * approach is more than I want to get into right now as we will have to define + * variants on the various operators that let us model the native rule system + * directly, e.g., an n-ary IProgram, n-ary IRule operator, an IPredicate + * operator, etc. Then we can handle evaluation using their model with anything + * re-written to our custom operators being caught by our custom evaluate() + * methods and everything else running their default methods. Definitely the + * right approach, and much easier to write unit tests. + * + * @todo REGEX : if there is a "ˆ" literal followed by a wildcard + * AND there are no flags which would cause problems (case-folding, etc) + * then the REGEX can be rewritten as a prefix scan on the lexicon, which + * is very efficient, and converted to an IN filter. When the set size is + * huge we should rewrite it as another tail in the query instead. + * <p> + * Otherwise, regex filters are left outside of the rule. We can't + * optimize that until we generate rules that perform JOINs across the + * lexicon and the spo relations (which we could do, in which case it + * becomes a constraint on that join). + * <p> + * We don't have any indices that are designed to optimize regex scans, + * but we could process a regex scan as a parallel iterator scan against + * the lexicon. + * + * @todo Roll more kinds of filters into the native {@link IRule}s as + * {@link IConstraint}s on {@link IPredicate}s. + * <p> + * isURI(), etc. can be evaluated by testing a bit flag on the term + * identifier, which is very efficient. + * <p> + * + * @todo Verify handling of datatype operations. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id: BigdataEvaluationStrategyImpl.java 2272 2009-11-04 02:10:19Z + * mrpersonick $ + */ +public class BigdataEvaluationStrategyImpl3 extends EvaluationStrategyImpl { + + /** + * Logger. + */ + protected static final Logger log = + Logger.getLogger(BigdataEvaluationStrategyImpl3.class); + + protected final BigdataTripleSource tripleSource; + + protected final Dataset dataset; + + private final AbstractTripleStore database; + + /** + */ + public BigdataEvaluationStrategyImpl3( + final BigdataTripleSource tripleSource, final Dataset dataset, + final boolean nativeJoins) { + + super(tripleSource, dataset); + + this.tripleSource = tripleSource; + this.dataset = dataset; + this.database = tripleSource.getDatabase(); + this.nativeJoins = nativeJoins; + + } + + /** + * If true, use native evaluation on the sesame operator tree if possible. + */ + private boolean nativeJoins; + + /** + * A set of properties that act as query hints during evaluation. + */ + private Properties queryHints; + + /** + * This is the top-level method called by the SAIL to evaluate a query. + * The TupleExpr parameter here is guaranteed to be the root of the operator + * tree for the query. Query hints are parsed by the SAIL from the + * namespaces in the original query. See {@link QueryHints#NAMESPACE}. + * <p> + * The query root will be handled by the native Sesame evaluation until we + * reach one of three possible top-level operators (union, join, or left + * join) at which point we will take over and translate the sesame operator + * tree into a native bigdata query. 
If in the process of this translation + * we encounter an operator that we cannot handle natively, we will log + * a warning message and punt to Sesame to let it handle the entire + * query evaluation process (much slower than native evaluation). + */ + public CloseableIteration<BindingSet, QueryEvaluationException> evaluate( + final TupleExpr expr, final BindingSet bindings, + final Properties queryHints) + throws QueryEvaluationException { + + // spit out the whole operator tree + if (log.isInfoEnabled()) { + log.info("operator tree:\n" + expr); + } + + this.queryHints = queryHints; + + if (log.isInfoEnabled()) { + log.info("queryHints:\n" + queryHints); + } + + return super.evaluate(expr, bindings); + + } + + /** + * Translate top-level UNIONs into native bigdata programs for execution. + * This will attempt to look down the operator tree from this point and turn + * the Sesame operators into a set of native rules within a single program. + * <p> + * FIXME A Union is a BinaryTupleOperator composed of two expressions. This + * native evaluation only handles the special case where the left and right + * args are one of: {Join, LeftJoin, StatementPattern, Union}. It's + * possible that the left or right arg is something other than one of those + * operators, in which case we punt to the Sesame evaluation, which + * degrades performance. + * <p> + * FIXME Also, even if the left or right arg is one of the cases we handle, + * it's possible that the translation of that arg into a native rule will + * fail because of an unsupported SPARQL language feature, such as an + * embedded UNION or an unsupported filter type. + */ + @Override + public CloseableIteration<BindingSet, QueryEvaluationException> evaluate( + final Union union, final BindingSet bs) + throws QueryEvaluationException { + + if (!nativeJoins) { + // Use Sesame 2 evaluation + return super.evaluate(union, bs); + } + + if (log.isInfoEnabled()) { + log.info("evaluating top-level Union operator"); + } + + try { + + return evaluateNatively(union, bs); + + } catch (UnsupportedOperatorException ex) { + + // Use Sesame 2 evaluation + + log.warn("could not evaluate natively, using Sesame evaluation"); + + if (log.isInfoEnabled()) { + log.info(ex.getOperator()); + } + + nativeJoins = false; + + return super.evaluate(union, bs); + + } + + } + + /** + * Translate top-level JOINs into native bigdata programs for execution. + * This will attempt to look down the operator tree from this point and turn + * the Sesame operators into a native rule. + * <p> + * FIXME It's possible that the translation of the left or right arg into a + * native rule will fail because of an unsupported SPARQL language feature, + * such as an embedded UNION or an unsupported filter type. + */ + @Override + public CloseableIteration<BindingSet, QueryEvaluationException> evaluate( + final Join join, final BindingSet bs) + throws QueryEvaluationException { + + if (!nativeJoins) { + // Use Sesame 2 evaluation + return super.evaluate(join, bs); + } + + if (log.isInfoEnabled()) { + log.info("evaluating top-level Join operator"); + } + + try { + + return evaluateNatively(join, bs); + + } catch (UnsupportedOperatorException ex) { + + // Use Sesame 2 evaluation + + log.warn("could not evaluate natively, using Sesame evaluation"); + + if (log.isInfoEnabled()) { + log.info(ex.getOperator()); + } + + nativeJoins = false; + + return super.evaluate(join, bs); + + } + + } + + /** + * Translate top-level LEFTJOINs into native bigdata programs for execution. 
+ * This will attempt to look down the operator tree from this point and turn + * the Sesame operators into a native rule. + * <p> + * FIXME It's possible that the translation of the left or right arg into a + * native rule will fail because of an unsupported SPARQL language feature, + * such as an embedded UNION or an unsupported filter type. + */ + @Override + public CloseableIteration<BindingSet, QueryEvaluationException> evaluate( + final LeftJoin leftJoin, final BindingSet bs) + throws QueryEvaluationException { + + if (!nativeJoins) { + // Use Sesame 2 evaluation + return super.evaluate(leftJoin, bs); + } + + if (log.isInfoEnabled()) { + log.info("evaluating top-level LeftJoin operator"); + } + + try { + + return evaluateNatively(leftJoin, bs); + + } catch (UnsupportedOperatorException ex) { + + // Use Sesame 2 evaluation + + log.warn("could not evaluate natively, using Sesame evaluation"); + + if (log.isInfoEnabled()) { + log.info(ex.getOperator()); + } + + nativeJoins = false; + + return super.evaluate(leftJoin, bs); + + } + + } + + private CloseableIteration<BindingSet, QueryEvaluationException> + evaluateNatively(final TupleExpr tupleExpr, final BindingSet bs) + throws QueryEvaluationException, UnsupportedOperatorException { + try { + return _evaluateNatively(tupleExpr, bs); + } catch (UnrecognizedValueException ex) { + return new EmptyIteration<BindingSet, QueryEvaluationException>(); + } catch (QueryEvaluationException ex) { + throw ex; + } catch (Exception ex) { + throw new QueryEvaluationException(ex); + } + } + + private CloseableIteration<BindingSet, QueryEvaluationException> + _evaluateNatively(final TupleExpr root, final BindingSet bs) + throws UnsupportedOperatorException, UnrecognizedValueException, + QueryEvaluationException { + + final SOpTreeBuilder stb = new SOpTreeBuilder(); + + /* + * The sesame operator tree + */ + SOpTree sopTree; + + /* + * Turn the Sesame operator tree into something a little easier + * to work with. + */ + sopTree = stb.collectSOps(root); + + /* + * We need to prune groups that contain terms that do not appear in + * our lexicon. + */ + final Collection<SOpGroup> groupsToPrune = new LinkedList<SOpGroup>(); + + /* + * Iterate through the sop tree and translate statement patterns into + * predicates. + */ + for (SOp sop : sopTree) { + final QueryModelNode op = sop.getOperator(); + if (op instanceof StatementPattern) { + final StatementPattern sp = (StatementPattern) op; + try { + final IPredicate bop = toPredicate((StatementPattern) op); + sop.setBOp(bop); + } catch (UnrecognizedValueException ex) { + /* + * If we encounter a value not in the lexicon, we can + * still continue with the query if the value is in + * either an optional tail or an optional join group (i.e. + * if it appears on the right side of a LeftJoin). + * Otherwise we can stop evaluating right now. + */ + if (sop.isRightSideLeftJoin()) { + groupsToPrune.add(sopTree.getGroup(sop.getGroup())); + } else { + throw ex; + } + } + } + } + + /* + * Prunes the sop tree of optional join groups containing values + * not in the lexicon. + */ + sopTree = stb.pruneGroups(sopTree, groupsToPrune); + + /* + * If we have a filter in the root group (one that can be safely applied + * across the entire query) that we cannot translate into a native + * bigdata constraint, we can run it as a FilterIterator after the + * query has run natively. 
+ */ + final Collection<ValueExpr> sesameFilters = new LinkedList<ValueExpr>(); + + /* + * We need to prune Sesame filters that we cannot translate into native + * constraints (ones that require lexicon joins). + */ + final Collection<SOp> sopsToPrune = new LinkedList<SOp>(); + + /* + * Iterate through the sop tree and translate Sesame ValueExpr operators + * into bigdata IConstraint boperators. + */ + for (SOp sop : sopTree) { + final QueryModelNode op = sop.getOperator(); + if (op instanceof ValueExpr) { + final ValueExpr ve = (ValueExpr) op; + try { + final IConstraint bop = toConstraint(ve); + sop.setBOp(bop); + } catch (UnsupportedOperatorException ex) { + /* + * If we encounter a sesame filter (ValueExpr) that we + * cannot translate, we can safely wrap the entire query + * with a Sesame filter iterator to capture that + * untranslatable value expression. If we are not in the + * root group however, we risk applying the filter to the + * wrong context (for example a filter inside an optional + * join group cannot be applied universally to the entire + * solution). In this case we must punt. + */ + if (sop.getGroup() == SOpTreeBuilder.ROOT_GROUP_ID) { + sopsToPrune.add(sop); + sesameFilters.add(ve); + } else { + throw ex; + } + } + } + } + + /* + * Prunes the sop tree of untranslatable filters. + */ + sopTree = stb.pruneSOps(sopTree, sopsToPrune); + + /* + * Make sure we don't have free text searches searching outside + * their named graph scope. + */ + attachNamedGraphsFilterToSearches(sopTree); + + /* + * Gather variables required by Sesame outside of the query + * evaluation (projection and global sesame filters). + */ + final IVariable[] required = + gatherRequiredVariables(root, sesameFilters); + + final QueryEngine queryEngine = tripleSource.getSail().getQueryEngine(); + + final PipelineOp query; + { + /* + * Note: The ids are assigned using incrementAndGet() so ONE (1) is + * the first id that will be assigned when we pass in ZERO (0) as + * the initial state of the AtomicInteger. + */ + final AtomicInteger idFactory = new AtomicInteger(0); + + // Convert the step to a bigdata operator tree. + query = SOp2BOpUtility.convert(sopTree, idFactory, database, + queryEngine, queryHints); + + if (log.isInfoEnabled()) + log.info(query); + + } + + return _evaluateNatively(query, bs, queryEngine, sesameFilters); + + } + + protected CloseableIteration<BindingSet, QueryEvaluationException> + _evaluateNatively(final PipelineOp query, final BindingSet bs, + final QueryEngine queryEngine, + final Collection<ValueExpr> sesameConstraints) + throws QueryEvaluationException { + + try { + + final IRunningQuery runningQuery = queryEngine.eval(query); + + final IAsynchronousIterator<IBindingSet[]> it1 = + runningQuery.iterator(); + + final IChunkedOrderedIterator<IBindingSet> it2 = + new ChunkedWrappedIterator<IBindingSet>( + new Dechunkerator<IBindingSet>(it1)); + + CloseableIteration<BindingSet, QueryEvaluationException> result = + new Bigdata2Sesame2BindingSetIterator<QueryEvaluationException>( + new BigdataBindingSetResolverator(database, it2).start( + database.getExecutorService())); + + // Wait for the Future (checks for errors). 
+ runningQuery.get(); + + // use the basic filter iterator for remaining filters + if (sesameConstraints != null) { + for (ValueExpr ve : sesameConstraints) { + final Filter filter = new Filter(null, ve); + result = new FilterIterator(filter, result, this); + } + } + + return result; + + } catch (QueryEvaluationException ex) { + throw ex; + } catch (Exception ex) { + throw new QueryEvaluationException(ex); + } + + } + +// /** +// * This is the method that will attempt to take a top-level join or left +// * join and turn it into a native bigdata rule. The Sesame operators Join +// * and LeftJoin share only the common base class BinaryTupleOperator, but +// * other BinaryTupleOperators are not supported by this method. Other +// * specific types of BinaryTupleOperators will cause this method to throw +// * an exception. +// * <p> +// * This method will also turn a single top-level StatementPattern into a +// * rule with one predicate in it. +// * <p> +// * Note: As a pre-condition, the {@link Value}s in the query expression +// * MUST have been rewritten as {@link BigdataValue}s and their term +// * identifiers MUST have been resolved. Any term identifier that remains +// * {@link IRawTripleStore#NULL} is an indication that there is no entry for +// * that {@link Value} in the database. Since the JOINs are required (vs +// * OPTIONALs), that means that there is no solution for the JOINs and an +// * {@link EmptyIteration} is returned rather than evaluating the query. +// * +// * @param join +// * @return native bigdata rule +// * @throws UnsupportedOperatorException +// * this exception will be thrown if the Sesame join contains any +// * SPARQL language constructs that cannot be converted into +// * the bigdata native rule model +// * @throws QueryEvaluationException +// */ +// private IRule createNativeQueryOld(final TupleExpr join) +// throws UnsupportedOperatorException, +// QueryEvaluationException { +// +// if (!(join instanceof StatementPattern || +// join instanceof Join || join instanceof LeftJoin || +// join instanceof Filter)) { +// throw new AssertionError( +// "only StatementPattern, Join, and LeftJoin supported"); +// } +// +// // flattened collection of statement patterns nested within this join, +// // along with whether or not each one is optional +// final Map<StatementPattern, Boolean> stmtPatterns = +// new LinkedHashMap<StatementPattern, Boolean>(); +// // flattened collection of filters nested within this join +// final Collection<Filter> filters = new LinkedList<Filter>(); +// +// // will throw EncounteredUnknownTupleExprException if the join +// // contains something we don't handle yet +//// collectStatementPatterns(join, stmtPatterns, filters); +// +// if (false) { +// for (Map.Entry<StatementPattern, Boolean> entry : +// stmtPatterns.entrySet()) { +// log.debug(entry.getKey() + ", optional=" + entry.getValue()); +// } +// for (Filter filter : filters) { +// log.debug(filter.getCondition()); +// } +// } +// +// // generate tails +// Collection<IPredicate> tails = new LinkedList<IPredicate>(); +// // keep a list of free text searches for later to solve a named graphs +// // problem +// final Map<IPredicate, StatementPattern> searches = +// new HashMap<IPredicate, StatementPattern>(); +// for (Map.Entry<StatementPattern, Boolean> entry : stmtPatterns +// .entrySet()) { +// StatementPattern sp = entry.getKey(); +// boolean optional = entry.getValue(); +// IPredicate tail = toPredicate(sp, optional); +// // encountered a value not in the database lexicon +// if (tail == 
null) { +// if (log.isDebugEnabled()) { +// log.debug("could not generate tail for: " + sp); +// } +// if (optional) { +// // for optionals, just skip the tail +// continue; +// } else { +// // for non-optionals, skip the entire rule +// return null; +// } +// } +// if (tail.getAccessPathExpander() instanceof FreeTextSearchExpander) { +// searches.put(tail, sp); +// } +// tails.add(tail); +// } +// +// /* +// * When in quads mode, we need to go through the free text searches and +// * make sure that they are properly filtered for the dataset where +// * needed. Joins will take care of this, so we only need to add a filter +// * when a search variable does not appear in any other tails that are +// * non-optional. +// * +// * @todo Bryan seems to think this can be fixed with a DISTINCT JOIN +// * mechanism in the rule evaluation. +// */ +// if (database.isQuads() && dataset != null) { +// for (IPredicate search : searches.keySet()) { +// final Set<URI> graphs; +// StatementPattern sp = searches.get(search); +// switch (sp.getScope()) { +// case DEFAULT_CONTEXTS: { +// /* +// * Query against the RDF merge of zero or more source +// * graphs. +// */ +// graphs = dataset.getDefaultGraphs(); +// break; +// } +// case NAMED_CONTEXTS: { +// /* +// * Query against zero or more named graphs. +// */ +// graphs = dataset.getNamedGraphs(); +// break; +// } +// default: +// throw new AssertionError(); +// } +// if (graphs == null) { +// continue; +// } +// // why would we use a constant with a free text search??? +// if (search.get(0).isConstant()) { +// throw new AssertionError(); +// } +// // get ahold of the search variable +// com.bigdata.bop.Var searchVar = +// (com.bigdata.bop.Var) search.get(0); +// if (log.isDebugEnabled()) { +// log.debug(searchVar); +// } +// // start by assuming it needs filtering, guilty until proven +// // innocent +// boolean needsFilter = true; +// // check the other tails one by one +// for (IPredicate<ISPO> tail : tails) { +// IAccessPathExpander<ISPO> expander = +// tail.getAccessPathExpander(); +// // only concerned with non-optional tails that are not +// // themselves magic searches +// if (expander instanceof FreeTextSearchExpander +// || tail.isOptional()) { +// continue; +// } +// // see if the search variable appears in this tail +// boolean appears = false; +// for (int i = 0; i < tail.arity(); i++) { +// IVariableOrConstant term = tail.get(i); +// if (log.isDebugEnabled()) { +// log.debug(term); +// } +// if (term.equals(searchVar)) { +// appears = true; +// break; +// } +// } +// // if it appears, we don't need a filter +// if (appears) { +// needsFilter = false; +// break; +// } +// } +// // if it needs a filter, add it to the expander +// if (needsFilter) { +// if (log.isDebugEnabled()) { +// log.debug("needs filter: " + searchVar); +// } +// FreeTextSearchExpander expander = (FreeTextSearchExpander) +// search.getAccessPathExpander(); +// expander.addNamedGraphsFilter(graphs); +// } +// } +// } +// +// // generate constraints +// final Collection<IConstraint> constraints = +// new LinkedList<IConstraint>(); +// final Iterator<Filter> filterIt = filters.iterator(); +// while (filterIt.hasNext()) { +// final Filter filter = filterIt.next(); +// final IConstraint constraint = toConstraint(filter.getCondition()); +// if (constraint != null) { +// // remove if we are able to generate a native constraint for it +// if (log.isDebugEnabled()) { +// log.debug("able to generate a constraint: " + constraint); +// } +// filterIt.remove(); +// 
constraints.add(constraint); +// } +// } +// +// /* +// * FIXME Native slice, DISTINCT, etc. are all commented out for now. +// * Except for ORDER_BY, support exists for all of these features in the +// * native rules, but we need to separate the rewrite of the tupleExpr +// * and its evaluation in order to properly handle this stuff. +// */ +// IQueryOptions queryOptions = QueryOptions.NONE; +// // if (slice) { +// // if (!distinct && !union) { +// // final ISlice slice = new Slice(offset, limit); +// // queryOptions = new QueryOptions(false/* distinct */, +// // true/* stable */, null/* orderBy */, slice); +// // } +// // } else { +// // if (distinct && !union) { +// // queryOptions = QueryOptions.DISTINCT; +// // } +// // } +// +//// if (log.isDebugEnabled()) { +//// for (IPredicate<ISPO> tail : tails) { +//// IAccessPathExpander<ISPO> expander = tail.getAccessPathExpander(); +//// if (expander != null) { +//// IAccessPath<ISPO> accessPath = database.getSPORelation() +//// .getAccessPath(tail); +//// accessPath = expander.getAccessPath(accessPath); +//// IChunkedOrderedIterator<ISPO> it = accessPath.iterator(); +//// while (it.hasNext()) { +//// log.debug(it.next().toString(database)); +//// } +//// } +//// } +//// } +// +// /* +// * Collect a set of variables required beyond just the join (i.e. +// * aggregation, projection, filters, etc.) +// */ +// Set<String> required = new HashSet<String>(); +// +// try { +// +// QueryModelNode p = join; +// while (true) { +// p = p.getParentNode(); +// if (log.isDebugEnabled()) { +// log.debug(p.getClass()); +// } +// if (p instanceof UnaryTupleOperator) { +// required.addAll(collectVariables((UnaryTupleOperator) p)); +// } +// if (p instanceof QueryRoot) { +// break; +// } +// } +// +// if (filters.size() > 0) { +// for (Filter filter : filters) { +// required.addAll(collectVariables((UnaryTupleOperator) filter)); +// } +// } +// +// } catch (Exception ex) { +// throw new QueryEvaluationException(ex); +// } +// +// IVariable[] requiredVars = new IVariable[required.size()]; +// int i = 0; +// for (String v : required) { +// requiredVars[i++] = com.bigdata.bop.Var.var(v); +// } +// +// if (log.isDebugEnabled()) { +// log.debug("required binding names: " + Arrays.toString(requiredVars)); +// } +// +//// if (starJoins) { // database.isQuads() == false) { +//// if (log.isDebugEnabled()) { +//// log.debug("generating star joins"); +//// } +//// tails = generateStarJoins(tails); +//// } +// +// // generate native rule +// IRule rule = new Rule("nativeJoin", +// // @todo should serialize the query string here for the logs. +// null, // head +// tails.toArray(new IPredicate[tails.size()]), queryOptions, +// // constraints on the rule. +// constraints.size() > 0 ? 
constraints +// .toArray(new IConstraint[constraints.size()]) : null, +// null/* constants */, null/* taskFactory */, requiredVars); +// +// if (BigdataStatics.debug) { +// System.err.println(join.toString()); +// System.err.println(rule.toString()); +// } +// +// // we have filters that we could not translate natively +// if (filters.size() > 0) { +// if (log.isDebugEnabled()) { +// log.debug("could not translate " + filters.size() +// + " filters into native constraints:"); +// for (Filter filter : filters) { +// log.debug("\n" + filter.getCondition()); +// } +// } +// // use the basic filter iterator for remaining filters +//// rule = new ProxyRuleWithSesameFilters(rule, filters); +// } +// +// return rule; +// +// } + + private void attachNamedGraphsFilterToSearches(final SOpTree sopTree) { + + /* + * When in quads mode, we need to go through the free text searches and + * make sure that they are properly filtered for the dataset where + * needed. Joins will take care of this, so we only need to add a filter + * when a search variable does not appear in any other tails that are + * non-optional. + * + * @todo Bryan seems to think this can be fixed with a DISTINCT JOIN + * mechanism in the rule evaluation. + */ + if (database.isQuads() && dataset != null) { +// for (IPredicate search : searches.keySet()) { + for (SOp sop : sopTree) { + final QueryModelNode op = sop.getOperator(); + if (!(op instanceof StatementPattern)) { + continue; + } + final StatementPattern sp = (StatementPattern) op; + final IPredicate pred = (IPredicate) sop.getBOp(); + if (!(pred.getAccessPat... [truncated message content] |
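A minimal sketch of how the BOUND, NOT and AND constraints added in this revision can be composed, e.g. to model a SPARQL filter such as FILTER(bound(?x) && !bound(?y)), which is the kind of test nested optional handling needs. This is an illustration, not part of the commit: the class and method names below are hypothetical, and only the constructors shown in the diff above plus the com.bigdata.bop.Var.var() factory already used by the evaluation strategy are assumed.

import com.bigdata.bop.IBindingSet;
import com.bigdata.bop.IConstraint;
import com.bigdata.bop.IVariable;
import com.bigdata.bop.Var;
import com.bigdata.bop.constraint.AND;
import com.bigdata.bop.constraint.BOUND;
import com.bigdata.bop.constraint.NOT;

/**
 * Illustration only: compose the new constraint operators.
 */
public class ConstraintCompositionSketch {

    /**
     * Build the constraint bound(x) && !bound(y).
     */
    public static IConstraint boundAndNotBound(final IVariable x,
            final IVariable y) {

        // AND takes two IConstraints, BOUND tests whether a variable is
        // bound in the solution, and NOT negates a child constraint.
        return new AND(new BOUND(x), new NOT(new BOUND(y)));

    }

    /**
     * Evaluate the composed constraint against an as-bound solution.
     */
    public static boolean accept(final IBindingSet bs) {

        final IConstraint c = boundAndNotBound(Var.var("x"), Var.var("y"));

        // Delegates to AND.accept(), which recursively calls accept() on
        // its child constraints with the same binding set.
        return c.accept(bs);

    }

}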