From: <tho...@us...> - 2010-10-11 19:55:47
Revision: 3769 http://bigdata.svn.sourceforge.net/bigdata/?rev=3769&view=rev Author: thompsonbry Date: 2010-10-11 19:55:40 +0000 (Mon, 11 Oct 2010) Log Message: ----------- Change to the test suite harness per MikeP to allow us to run just those tests specified in an inline array. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataEmbeddedFederationSparqlTest.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2010-10-11 18:33:47 UTC (rev 3768) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2010-10-11 19:55:40 UTC (rev 3769) @@ -465,7 +465,7 @@ } - if (log.isDebugEnabled()) { + if (true||log.isDebugEnabled()) { // just for now while i'm debugging System.err.println("rule=" + rule + "\nquery=" + BOpUtility.toString(left)); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataEmbeddedFederationSparqlTest.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataEmbeddedFederationSparqlTest.java 2010-10-11 18:33:47 UTC (rev 3768) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataEmbeddedFederationSparqlTest.java 2010-10-11 19:55:40 UTC (rev 3769) @@ -28,18 +28,15 @@ package com.bigdata.rdf.sail.tck; import java.io.File; -import java.util.Enumeration; import java.util.Properties; import junit.framework.Test; import junit.framework.TestSuite; - import net.jini.config.ConfigurationException; import org.openrdf.query.Dataset; import org.openrdf.query.parser.sparql.ManifestTest; import org.openrdf.query.parser.sparql.SPARQLQueryTest; -import org.openrdf.query.parser.sparql.SPARQLQueryTest.Factory; import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.dataset.DatasetRepository; @@ -76,10 +73,11 @@ } /** - * Skip the dataset tests for now until we can figure out what is wrong - * with them. + * Skip the dataset tests for now until we can figure out what is wrong with + * them. * - * @todo FIXME fix the dataset tests + * FIXME Fix the dataset tests. There is some problem in how the data to be + * loaded into the fixture is being resolved in these tests. */ public static Test suite() throws Exception { @@ -91,14 +89,20 @@ TestSuite suite1 = suiteEmbeddedFederation(); - if (!hideDatasetTests) { - - return suite1; - + // Only run the specified tests? 
+ if (!testURIs.isEmpty()) { + final TestSuite suite = new TestSuite(); + for (String s : testURIs) { + suite.addTest(getSingleTest(suite1, s)); + } + return suite; } - return filterOutDataSetTests(suite1); + if(hideDatasetTests) + suite1 = filterOutDataSetTests(suite1); + return suite1; + } /** Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java 2010-10-11 18:33:47 UTC (rev 3768) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java 2010-10-11 19:55:40 UTC (rev 3769) @@ -28,16 +28,20 @@ package com.bigdata.rdf.sail.tck; import info.aduna.io.IOUtil; -import java.io.File; + import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; +import java.util.Iterator; import java.util.Properties; + import junit.framework.Test; +import junit.framework.TestCase; import junit.framework.TestSuite; + import org.openrdf.query.Dataset; import org.openrdf.query.parser.sparql.ManifestTest; import org.openrdf.query.parser.sparql.SPARQLQueryTest; @@ -46,6 +50,7 @@ import org.openrdf.repository.dataset.DatasetRepository; import org.openrdf.repository.sail.SailRepository; import org.openrdf.sail.memory.MemoryStore; + import com.bigdata.btree.keys.CollatorEnum; import com.bigdata.btree.keys.StrengthEnum; import com.bigdata.journal.BufferMode; @@ -53,9 +58,11 @@ import com.bigdata.rdf.sail.BigdataSail; import com.bigdata.rdf.sail.BigdataSailRepository; import com.bigdata.rdf.sail.BigdataSail.Options; -import com.bigdata.rdf.store.LocalTripleStore; -import com.bigdata.relation.AbstractResource; +import cutthecrap.utils.striterators.Expander; +import cutthecrap.utils.striterators.SingleValueIterator; +import cutthecrap.utils.striterators.Striterator; + /** * Test harness for running the SPARQL test suites. * @@ -87,13 +94,14 @@ "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-9", }); - private static String datasetTests = "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/dataset"; - +// private static String datasetTests = "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/dataset"; + /** - * Skip the dataset tests for now until we can figure out what is wrong - * with them. + * Skip the dataset tests for now until we can figure out what is wrong with + * them. * - * @todo FIXME fix the dataset tests + * FIXME Fix the dataset tests. There is some problem in how the data to be + * loaded into the fixture is being resolved in these tests. */ public static Test suite() throws Exception { @@ -105,17 +113,31 @@ TestSuite suite1 = suiteLTSWithPipelineJoins(); - if (!hideDatasetTests) { - - return suite1; - + // Only run the specified tests? + if (!testURIs.isEmpty()) { + final TestSuite suite = new TestSuite(); + for (String s : testURIs) { + suite.addTest(getSingleTest(suite1, s)); + } + return suite; } - return filterOutDataSetTests(suite1); + if(hideDatasetTests) + suite1 = filterOutDataSetTests(suite1); + return suite1; + } - - static protected Test filterOutDataSetTests(TestSuite suite1) { + + /** + * Hack filters out the "dataset" tests. + * + * @param suite1 + * The test suite. + * + * @return The test suite without the data set tests. 
+ */ + static protected TestSuite filterOutDataSetTests(final TestSuite suite1) { final TestSuite suite2 = new TestSuite(suite1.getName()); @@ -136,7 +158,74 @@ return suite2; } + + /** + * An array of URIs for tests to be run. When null or empty the default test + * suite is run. When specified, only the tests matching these test URIs are + * run. + */ + static final protected Collection<String> testURIs = Arrays.asList(new String[] { +/* +// busted with EvalStrategy1 + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#nested-opt-2", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#filter-scope-1", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#join-scope-1", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-optional-complex-4", + +// busted with EvalStrategy2 with LeftJoin enabled + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-12", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#nested-opt-1", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#opt-filter-1", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#opt-filter-2", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-optional-complex-3", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-001", + "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-004", +*/ +// Dataset crap + // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/i18n/manifest#normalization-1" + + // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/dataset/manifest#dawg-dataset-01" + +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2//manifest#", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-str-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-str-2", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-datatype-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-simple", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-eq", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-not-eq", +// +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest#eq-graph-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest#eq-graph-2", + + /* + * busted with scale-out quads query. 
+ */ +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-union-001", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/graph/manifest#dawg-graph-07", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/graph/manifest#dawg-graph-11", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-star-1" + }); + protected static BigdataSparqlTest getSingleTest(TestSuite suite, String testURI) throws Exception { + + BigdataSparqlTest test = null; +// TestSuite suite = (TestSuite) BigdataSparqlTest.suite(false); + Enumeration e1 = suite.tests(); + while (e1.hasMoreElements()) { + suite = (TestSuite) e1.nextElement(); + Enumeration e2 = suite.tests(); + while (e2.hasMoreElements()) { + test = (BigdataSparqlTest) e2.nextElement(); + if (testURI.equals(test.getTestURI())) { + return test; + } + } + } + + throw new RuntimeException("could not find a test with that URI"); + + } + /** * Return the test suite. */ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dm...@us...> - 2010-10-12 14:59:49
Revision: 3773 http://bigdata.svn.sourceforge.net/bigdata/?rev=3773&view=rev Author: dmacgbr Date: 2010-10-12 14:59:42 +0000 (Tue, 12 Oct 2010) Log Message: ----------- Make JiniClient static in test class. Prefer logger over stdout/stderr in Rule2BOpUtility Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataFederationSparqlTest.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2010-10-12 14:33:04 UTC (rev 3772) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2010-10-12 14:59:42 UTC (rev 3773) @@ -487,7 +487,7 @@ if (true||log.isDebugEnabled()) { // just for now while i'm debugging - System.err.println("rule=" + rule + "\nquery=" + log.info ("rule=" + rule + "\nquery=" + BOpUtility.toString(left)); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataFederationSparqlTest.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataFederationSparqlTest.java 2010-10-12 14:33:04 UTC (rev 3772) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataFederationSparqlTest.java 2010-10-12 14:59:42 UTC (rev 3773) @@ -68,7 +68,15 @@ { super ( URI, name, query, results, dataSet, laxCardinality ) ; } - + + @Override public void runTest () + throws Exception + { + _logger.info ( String.format ( ">>>>> Running test: %s", testURI ) ) ; + super.runTest () ; + _logger.info ( String.format ( ">>>>> Completed test: %s", testURI ) ) ; + } + @Override public void tearDown () throws Exception { @@ -140,7 +148,8 @@ private static final Logger _logger = Logger.getLogger ( BigdataFederationSparqlTest.class ) ; - private JiniFederation<Object> _fed = null ; + private static JiniFederation<Object> _fed = null ; + private static Properties _properties = null ; + private ScaleOutTripleStore _ts = null ; - private Properties _properties = null ; } \ No newline at end of file
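Both changes follow a standard pattern for expensive scale-out test fixtures: hold the costly client in static fields so a single instance spans the whole suite, and bracket each test with log lines so a hang or failure can be attributed from the output alone. A minimal sketch of that pattern, with ExpensiveClient as a hypothetical stand-in for the JiniFederation:

    import junit.framework.TestCase;

    import org.apache.log4j.Logger;

    public class SharedFixtureTest extends TestCase {

        private static final Logger _logger = Logger.getLogger(SharedFixtureTest.class);

        // Static, as the commit makes the JiniFederation: one instance is
        // shared by every test in the suite instead of one per test method.
        private static ExpensiveClient _client = null;

        protected static ExpensiveClient client() {
            if (_client == null)
                _client = new ExpensiveClient();
            return _client;
        }

        // Bracket each test so progress is visible in the log.
        @Override
        public void runTest() throws Throwable {
            _logger.info(String.format(">>>>> Running test: %s", getName()));
            super.runTest();
            _logger.info(String.format(">>>>> Completed test: %s", getName()));
        }

        public void testClientIsShared() {
            assertNotNull(client());
        }

        // Hypothetical stand-in for a costly resource such as a JiniFederation.
        private static class ExpensiveClient {
        }

    }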
From: <mrp...@us...> - 2010-10-25 15:43:24
Revision: 3844 http://bigdata.svn.sourceforge.net/bigdata/?rev=3844&view=rev Author: mrpersonick Date: 2010-10-25 15:43:17 +0000 (Mon, 25 Oct 2010) Log Message: ----------- testing single tail rules Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestMultiGraphs.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/org/openrdf/query/parser/sparql/SPARQLQueryTest.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameMultiGraphs.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java 2010-10-25 15:40:55 UTC (rev 3843) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl.java 2010-10-25 15:43:17 UTC (rev 3844) @@ -466,7 +466,7 @@ return new EmptyIteration<BindingSet, QueryEvaluationException>(); } - return execute(query, bindings); + return execute(query); } catch (UnknownOperatorException ex) { @@ -577,7 +577,7 @@ return new EmptyIteration<BindingSet, QueryEvaluationException>(); } - return execute(query, bindings); + return execute(query); } catch (UnknownOperatorException ex) { @@ -683,7 +683,7 @@ return new EmptyIteration<BindingSet, QueryEvaluationException>(); } - return execute(query, bindings); + return execute(query); } catch (UnknownOperatorException ex) { @@ -1668,16 +1668,8 @@ * * @throws QueryEvaluationException */ -// protected CloseableIteration<BindingSet, QueryEvaluationException> execute( -// final IStep step) -// throws Exception { -// -// return execute(step, null); -// -// } - protected CloseableIteration<BindingSet, QueryEvaluationException> execute( - final IStep step, final BindingSet constants) + final IStep step) throws Exception { final QueryEngine queryEngine = tripleSource.getSail().getQueryEngine(); @@ -1721,7 +1713,7 @@ CloseableIteration<BindingSet, QueryEvaluationException> result = new Bigdata2Sesame2BindingSetIterator<QueryEvaluationException>( new BigdataBindingSetResolverator(database, it2).start(database - .getExecutorService()), constants); + .getExecutorService())); try { // Wait for the Future (checks for errors). @@ -1842,6 +1834,10 @@ final StatementPattern sp, final BindingSet bindings) throws QueryEvaluationException { + if (sp.getParentNode() instanceof Projection) { + return evaluateSingleTailRule(sp, bindings); + } + if (log.isDebugEnabled()) { log.debug("evaluating statement pattern:\n" + sp); } @@ -1873,39 +1869,38 @@ } -// /** -// * Override evaluation of StatementPatterns to recognize magic search -// * predicate. -// */ -// @Override -// public CloseableIteration<BindingSet, QueryEvaluationException> evaluate( -// final StatementPattern sp, final BindingSet bindings) -// throws QueryEvaluationException { -// -// // no check against the nativeJoins property here because we are simply -// // using the native execution model to take care of magic searches. 
-// -// if (log.isDebugEnabled()) { -// log.debug("evaluating statement pattern:\n" + sp); -// } -// -// final IStep query = createNativeQuery(sp); -// -// if (query == null) { -// return new EmptyIteration<BindingSet, QueryEvaluationException>(); -// } -// -// try { -// -// return execute(query, bindings); -// -// } catch (Exception ex) { -// -// throw new QueryEvaluationException(ex); -// -// } -// -// } + /** + * Override evaluation of StatementPatterns to recognize magic search + * predicate. + */ + public CloseableIteration<BindingSet, QueryEvaluationException> evaluateSingleTailRule( + final StatementPattern sp, final BindingSet bindings) + throws QueryEvaluationException { + + // no check against the nativeJoins property here because we are simply + // using the native execution model to take care of magic searches. + + if (log.isDebugEnabled()) { + log.debug("evaluating statement pattern:\n" + sp); + } + + final IStep query = createNativeQuery(sp); + + if (query == null) { + return new EmptyIteration<BindingSet, QueryEvaluationException>(); + } + + try { + + return execute(query); + + } catch (Exception ex) { + + throw new QueryEvaluationException(ex); + + } + + } /** Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestMultiGraphs.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestMultiGraphs.java 2010-10-25 15:40:55 UTC (rev 3843) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestMultiGraphs.java 2010-10-25 15:43:17 UTC (rev 3844) @@ -163,7 +163,7 @@ "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + "PREFIX ns: <"+ns+"> " + - "select ?p ?o " + + "select distinct ?p ?o " + "WHERE { " + // " ?s rdf:type ns:Person . " + " ns:Mike ?p ?o . " + Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameMultiGraphs.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameMultiGraphs.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameMultiGraphs.java 2010-10-25 15:43:17 UTC (rev 3844) @@ -0,0 +1,163 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Sep 16, 2009 + */ + +package com.bigdata.rdf.sail; + +import org.openrdf.model.URI; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQuery; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.repository.sail.SailRepositoryConnection; +import org.openrdf.sail.Sail; +import org.openrdf.sail.memory.MemoryStore; + +/** + * @author <a href="mailto:mrp...@us...">Mike Personick</a> + * @version $Id$ + */ +public class TestSesameMultiGraphs { + + public static void main(String[] args) throws Exception { + + final Sail sail; + final SailRepository repo; + final SailRepositoryConnection cxn; + + sail = new MemoryStore(); + repo = new SailRepository(sail); + + repo.initialize(); + cxn = repo.getConnection(); + cxn.setAutoCommit(false); + + try { + + final ValueFactory vf = sail.getValueFactory(); + + final String ns = "http://namespace/"; + + URI a = vf.createURI(ns+"a"); + URI b = vf.createURI(ns+"b"); + URI c = vf.createURI(ns+"c"); + URI g1 = vf.createURI(ns+"graph1"); + URI g2 = vf.createURI(ns+"graph2"); +/**/ + cxn.setNamespace("ns", ns); + + cxn.add(a, b, c, g1, g2); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.commit();// + + { + + String query = + "PREFIX rdf: <"+RDF.NAMESPACE+"> " + + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + + "PREFIX ns: <"+ns+"> " + + + "select ?p ?o " + + "WHERE { " + + " ns:a ?p ?o . " + + "}"; + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + + System.err.println("no dataset specified, RDF-MERGE, should produce one solution:"); + while (result.hasNext()) { + System.err.println(result.next()); + } + + } + + { + + String query = + "PREFIX rdf: <"+RDF.NAMESPACE+"> " + + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + + "PREFIX ns: <"+ns+"> " + + + "select ?p ?o " + + "from <"+g1+">" + + "from <"+g2+">" + + "WHERE { " + + " ns:a ?p ?o . " + + "}"; + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + + System.err.println("default graph query, RDF-MERGE, should produce one solution:"); + while (result.hasNext()) { + System.err.println(result.next()); + } + + } + + { + + String query = + "PREFIX rdf: <"+RDF.NAMESPACE+"> " + + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + + "PREFIX ns: <"+ns+"> " + + + "select ?p ?o " + + "from named <"+g1+">" + + "from named <"+g2+">" + + "WHERE { " + + " graph ?g { ns:a ?p ?o . 
} " + + "}"; + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + TupleQueryResult result = tupleQuery.evaluate(); + + System.err.println("named graph query, no RDF-MERGE, should produce two solutions:"); + while (result.hasNext()) { + System.err.println(result.next()); + } + + } + + } finally { + cxn.close(); + sail.shutDown(); + } + + } + +} Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java 2010-10-25 15:40:55 UTC (rev 3843) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java 2010-10-25 15:43:17 UTC (rev 3844) @@ -28,6 +28,7 @@ package com.bigdata.rdf.sail.tck; import info.aduna.io.IOUtil; +import info.aduna.iteration.Iterations; import java.io.InputStream; import java.io.InputStreamReader; @@ -36,15 +37,27 @@ import java.util.Collection; import java.util.Enumeration; import java.util.Properties; +import java.util.Set; import junit.framework.Test; import junit.framework.TestSuite; +import org.apache.log4j.Logger; +import org.openrdf.model.Statement; +import org.openrdf.query.BooleanQuery; import org.openrdf.query.Dataset; +import org.openrdf.query.GraphQuery; +import org.openrdf.query.GraphQueryResult; +import org.openrdf.query.Query; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQuery; +import org.openrdf.query.TupleQueryResult; import org.openrdf.query.parser.sparql.ManifestTest; import org.openrdf.query.parser.sparql.SPARQLQueryTest; import org.openrdf.repository.Repository; +import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.RepositoryException; +import org.openrdf.repository.RepositoryResult; import org.openrdf.repository.dataset.DatasetRepository; import org.openrdf.repository.sail.SailRepository; import org.openrdf.sail.memory.MemoryStore; @@ -65,6 +78,9 @@ */ public class BigdataSparqlTest extends SPARQLQueryTest { + static protected final Logger log = Logger.getLogger(BigdataSparqlTest.class); + + /** * We cannot use inlining for these test because we do normalization on * numeric values and these tests test for syntatic differences, i.e. @@ -168,22 +184,22 @@ * run. 
*/ static final Collection<String> testURIs = Arrays.asList(new String[] { -/* + // busted with EvalStrategy1 - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#nested-opt-2", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#filter-scope-1", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#join-scope-1", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-optional-complex-4", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#nested-opt-2", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#filter-scope-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#join-scope-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-optional-complex-4", // busted with EvalStrategy2 with LeftJoin enabled - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-12", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#nested-opt-1", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#opt-filter-1", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#opt-filter-2", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-optional-complex-3", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-001", - "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-004", -*/ +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-12", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#nested-opt-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#opt-filter-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest#opt-filter-2", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest#dawg-optional-complex-3", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-001", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-004", + // Dataset crap // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/i18n/manifest#normalization-1", @@ -207,6 +223,7 @@ // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/basic/manifest#prefix-name-1",//OK // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/basic/manifest#spoo-1",//BOOM +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/graph/manifest#dawg-graph-05", }); /** @@ -424,11 +441,6 @@ super.setUp(); } - @Override - public void runTest() throws Exception { - super.runTest(); - } - public Repository getRepository() { return dataRep; } @@ -448,5 +460,43 @@ } +// @Override +// protected void runTest() +// throws Exception +// { +// RepositoryConnection con = getQueryConnection(dataRep); +// try { +// +// log.info("database dump:"); +// RepositoryResult<Statement> stmts = con.getStatements(null, null, null, false); +// while (stmts.hasNext()) { +// log.info(stmts.next()); +// } +// log.info("dataset:\n" + dataset); +// +// String queryString = readQueryString(); +// log.info("query:\n" + getQueryString()); +// +// Query query = con.prepareQuery(QueryLanguage.SPARQL, queryString, queryFileURL); +// if (dataset != null) { +// query.setDataset(dataset); +// } +// +// if (query instanceof TupleQuery) { +// TupleQueryResult queryResult = ((TupleQuery)query).evaluate(); +// while (queryResult.hasNext()) 
{ +// log.info("query result:\n" + queryResult.next()); +// } +// } +// +// } +// finally { +// con.close(); +// } +// +// super.runTest(); +// } + + } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/org/openrdf/query/parser/sparql/SPARQLQueryTest.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/org/openrdf/query/parser/sparql/SPARQLQueryTest.java 2010-10-25 15:40:55 UTC (rev 3843) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/org/openrdf/query/parser/sparql/SPARQLQueryTest.java 2010-10-25 15:43:17 UTC (rev 3844) @@ -435,7 +435,7 @@ } } - private String readQueryString() + protected String readQueryString() throws IOException { InputStream stream = new URL(queryFileURL).openStream(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2011-01-12 19:38:04
Revision: 4078 http://bigdata.svn.sourceforge.net/bigdata/?rev=4078&view=rev Author: mrpersonick Date: 2011-01-12 19:37:57 +0000 (Wed, 12 Jan 2011) Log Message: ----------- allowing non-native Sesame filters to run Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameFilters.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-01-12 18:25:21 UTC (rev 4077) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-01-12 19:37:57 UTC (rev 4078) @@ -548,7 +548,7 @@ * bigdata constraint, we can run it as a FilterIterator after the * query has run natively. */ - final Collection<ValueExpr> sesameFilters = new LinkedList<ValueExpr>(); + final Collection<Filter> sesameFilters = new LinkedList<Filter>(); /* * We need to prune Sesame filters that we cannot translate into native @@ -563,7 +563,19 @@ for (SOp sop : sopTree) { final QueryModelNode op = sop.getOperator(); if (op instanceof ValueExpr) { + /* + * If we have a raw ValueExpr and not a Filter we know it must + * be the condition of a LeftJoin, in which case we cannot + * use the Sesame FilterIterator to safely evaluate it. A + * UnsupportedOperatorException here must just flow through + * to Sesame evaluation of the entire query. + */ final ValueExpr ve = (ValueExpr) op; + final IConstraint bop = toConstraint(ve); + sop.setBOp(bop); + } else if (op instanceof Filter) { + final Filter filter = (Filter) op; + final ValueExpr ve = filter.getCondition(); try { final IConstraint bop = toConstraint(ve); sop.setBOp(bop); @@ -580,7 +592,7 @@ */ if (sop.getGroup() == SOpTreeBuilder.ROOT_GROUP_ID) { sopsToPrune.add(sop); - sesameFilters.add(ve); + sesameFilters.add(filter); } else { throw ex; } @@ -635,7 +647,7 @@ protected CloseableIteration<BindingSet, QueryEvaluationException> _evaluateNatively(final PipelineOp query, final BindingSet bs, final QueryEngine queryEngine, - final Collection<ValueExpr> sesameConstraints) + final Collection<Filter> sesameFilters) throws QueryEvaluationException { try { @@ -658,10 +670,12 @@ runningQuery.get(); // use the basic filter iterator for remaining filters - if (sesameConstraints != null) { - for (ValueExpr ve : sesameConstraints) { - final Filter filter = new Filter(null, ve); - result = new FilterIterator(filter, result, this); + if (sesameFilters != null) { + for (Filter f : sesameFilters) { + if (log.isDebugEnabled()) { + log.debug("attaching sesame filter: " + f); + } + result = new FilterIterator(f, result, this); } } @@ -1093,7 +1107,7 @@ } protected IVariable[] gatherRequiredVariables(final TupleExpr root, - final Collection<ValueExpr> sesameFilters) { + final Collection<Filter> sesameFilters) { /* * Collect a set of variables required beyond just the join (i.e. 
@@ -1116,8 +1130,8 @@ } if (sesameFilters.size() > 0) { - for (ValueExpr ve : sesameFilters) { - required.addAll(collectVariables((UnaryTupleOperator) ve)); + for (Filter f : sesameFilters) { + required.addAll(collectVariables(f.getCondition())); } } @@ -1140,48 +1154,47 @@ * they can be added to the list of required variables in the query for * correct binding set pruning. * - * @param uto + * @param op * the <code>UnaryTupleOperator</code> * @return * the variables it uses */ - protected Set<String> collectVariables(UnaryTupleOperator uto) { + protected Set<String> collectVariables(final QueryModelNode op) { final Set<String> vars = new HashSet<String>(); - if (uto instanceof Projection) { - List<ProjectionElem> elems = - ((Projection) uto).getProjectionElemList().getElements(); + if (op instanceof Projection) { + final List<ProjectionElem> elems = + ((Projection) op).getProjectionElemList().getElements(); for (ProjectionElem elem : elems) { vars.add(elem.getSourceName()); } - } else if (uto instanceof MultiProjection) { - List<ProjectionElemList> elemLists = - ((MultiProjection) uto).getProjections(); + } else if (op instanceof MultiProjection) { + final List<ProjectionElemList> elemLists = + ((MultiProjection) op).getProjections(); for (ProjectionElemList list : elemLists) { List<ProjectionElem> elems = list.getElements(); for (ProjectionElem elem : elems) { vars.add(elem.getSourceName()); } } - } else if (uto instanceof Filter) { - Filter f = (Filter) uto; - ValueExpr ve = f.getCondition(); + } else if (op instanceof ValueExpr) { + final ValueExpr ve = (ValueExpr) op; ve.visit(new QueryModelVisitorBase<RuntimeException>() { @Override public void meet(Var v) { vars.add(v.getName()); } }); - } else if (uto instanceof Group) { - Group g = (Group) uto; + } else if (op instanceof Group) { + final Group g = (Group) op; g.visit(new QueryModelVisitorBase<RuntimeException>() { @Override public void meet(Var v) { vars.add(v.getName()); } }); - } else if (uto instanceof Order) { - Order o = (Order) uto; + } else if (op instanceof Order) { + final Order o = (Order) op; o.visit(new QueryModelVisitorBase<RuntimeException>() { @Override public void meet(Var v) { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java 2011-01-12 18:25:21 UTC (rev 4077) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java 2011-01-12 19:37:57 UTC (rev 4078) @@ -222,7 +222,7 @@ final ValueExpr ve = filter.getCondition(); // make a constraint, attach it to the rule if (ve != null) { - sops.add(new SOp(sopId.incrementAndGet(), ve, g, pg, rslj)); + sops.add(new SOp(sopId.incrementAndGet(), filter, g, pg, rslj)); } final TupleExpr arg = filter.getArg(); Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameFilters.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameFilters.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSesameFilters.java 2011-01-12 19:37:57 UTC (rev 4078) @@ -0,0 +1,195 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... 
+ +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Sep 16, 2009 + */ + +package com.bigdata.rdf.sail; + +import java.util.Collection; +import java.util.LinkedList; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.log4j.Logger; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.BindingSet; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.query.algebra.Projection; +import org.openrdf.query.algebra.QueryRoot; +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.impl.BindingImpl; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.sail.SailTupleQuery; + +import com.bigdata.bop.BOpUtility; +import com.bigdata.bop.PipelineOp; +import com.bigdata.bop.engine.QueryEngine; +import com.bigdata.rdf.axioms.NoAxioms; +import com.bigdata.rdf.sail.sop.SOp; +import com.bigdata.rdf.sail.sop.SOp2BOpUtility; +import com.bigdata.rdf.sail.sop.SOpTree; +import com.bigdata.rdf.sail.sop.SOpTree.SOpGroup; +import com.bigdata.rdf.sail.sop.SOpTree.SOpGroups; +import com.bigdata.rdf.sail.sop.SOpTreeBuilder; +import com.bigdata.rdf.store.AbstractTripleStore; +import com.bigdata.rdf.store.BD; +import com.bigdata.rdf.vocab.NoVocabulary; + +public class TestSesameFilters extends ProxyBigdataSailTestCase { + + protected static final Logger log = Logger.getLogger(TestSesameFilters.class); + + protected static final boolean INFO = log.isInfoEnabled(); + + @Override + public Properties getProperties() { + + Properties props = super.getProperties(); + + props.setProperty(BigdataSail.Options.AXIOMS_CLASS, NoAxioms.class.getName()); + props.setProperty(BigdataSail.Options.VOCABULARY_CLASS, NoVocabulary.class.getName()); + props.setProperty(BigdataSail.Options.TRUTH_MAINTENANCE, "false"); + props.setProperty(BigdataSail.Options.JUSTIFY, "false"); + props.setProperty(BigdataSail.Options.TEXT_INDEX, "false"); + + return props; + + } + + /** + * + */ + public TestSesameFilters() { + } + + /** + * @param arg0 + */ + public TestSesameFilters(String arg0) { + super(arg0); + } + + public void testRegex() throws Exception { + +// final Sail sail = new MemoryStore(); +// sail.initialize(); +// final Repository repo = new SailRepository(sail); + + final BigdataSail sail = getSail(); + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + + final RepositoryConnection cxn = repo.getConnection(); + cxn.setAutoCommit(false); + + try { + + final ValueFactory vf = sail.getValueFactory(); + + /* + * Create some terms. 
+ */ + final URI mike = vf.createURI(BD.NAMESPACE + "mike"); + final URI bryan = vf.createURI(BD.NAMESPACE + "bryan"); + final URI person = vf.createURI(BD.NAMESPACE + "Person"); + final Literal l1 = vf.createLiteral("mike personick"); + final Literal l2 = vf.createLiteral("bryan thompson"); + + /* + * Create some statements. + */ + cxn.add(mike, RDF.TYPE, person); + cxn.add(mike, RDFS.LABEL, l1); + cxn.add(bryan, RDF.TYPE, person); + cxn.add(bryan, RDFS.LABEL, l2); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.commit(); + + { + + String query = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "select * " + + "where { " + + " ?s rdf:type bd:Person . " + + " ?s rdfs:label ?label . " + + " FILTER regex(?label, \"mike\") . " + + "}"; + + final SailTupleQuery tupleQuery = (SailTupleQuery) + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(false /* includeInferred */); + + if (log.isInfoEnabled()) { + + final BigdataSailTupleQuery bdTupleQuery = + (BigdataSailTupleQuery) tupleQuery; + final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); + final Projection p = (Projection) root.getArg(); + final TupleExpr tupleExpr = p.getArg(); + final SOpTreeBuilder stb = new SOpTreeBuilder(); + final SOpTree tree = stb.collectSOps(tupleExpr); + + log.info(tree); + log.info(query); + + final TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + log.info(result.next()); + } + + } + + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("s", mike), + new BindingImpl("label", l1) + )); + + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + + } + + } finally { + cxn.close(); + sail.shutDown(); + } + + } + +}
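Condensed, the new control flow is: try to compile every FILTER condition into a native IConstraint; if translation fails for a filter in the root group, keep the filter aside and layer it over the native solutions afterwards; anywhere else, give up and let Sesame evaluate the whole query. A sketch of that flow, not compilable on its own — attachConstraint(), isInRootGroup() and runNatively() are hypothetical condensations of the code in the diff above:

    protected CloseableIteration<BindingSet, QueryEvaluationException>
            evaluateWithFallback(final PipelineOp query,
                    final Collection<Filter> filters)
            throws QueryEvaluationException {

        // Filters we could not translate; root-group only, per the commit.
        final Collection<Filter> deferred = new LinkedList<Filter>();

        for (Filter filter : filters) {
            try {
                // Native path: compile the FILTER condition to an IConstraint.
                attachConstraint(toConstraint(filter.getCondition()));
            } catch (UnsupportedOperatorException ex) {
                if (isInRootGroup(filter)) {
                    deferred.add(filter); // safe to apply after the fact
                } else {
                    // e.g. a condition inside an OPTIONAL: flow through to
                    // full Sesame evaluation of the query.
                    throw ex;
                }
            }
        }

        CloseableIteration<BindingSet, QueryEvaluationException> result =
                runNatively(query);

        // Layer the untranslatable filters over the native solutions, as
        // _evaluateNatively() now does with Sesame's FilterIterator.
        for (Filter f : deferred) {
            result = new FilterIterator(f, result, this);
        }

        return result;

    }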
From: <mrp...@us...> - 2011-01-25 16:01:22
Revision: 4164 http://bigdata.svn.sourceforge.net/bigdata/?rev=4164&view=rev Author: mrpersonick Date: 2011-01-25 16:01:15 +0000 (Tue, 25 Jan 2011) Log Message: ----------- bringing over new free text search features from HA branch Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-01-22 21:13:16 UTC (rev 4163) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-01-25 16:01:15 UTC (rev 4164) @@ -7,8 +7,11 @@ import java.util.Collection; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -615,6 +618,68 @@ final Collection<SOpGroup> groupsToPrune = new LinkedList<SOpGroup>(); /* + * We need to prune Sesame filters that we cannot translate into native + * constraints (ones that require lexicon joins). We also need to + * prune search metadata tails. + */ + final Collection<SOp> sopsToPrune = new LinkedList<SOp>(); + + /* + * deal with free text search tails first. need to match up search + * metadata tails with the searches themselves. ie: + * + * select * + * where { + * ?s bd:search "foo" . + * ?s bd:relevance ?score . + * } + */ + // the statement patterns for metadata about the searches + final Map<Var, Set<StatementPattern>> searchMetadata = + new LinkedHashMap<Var, Set<StatementPattern>>(); + // do a first pass to gather up the actual searches and take them out + // of the master list of statement patterns + for (SOp sop : sopTree) { + final QueryModelNode op = sop.getOperator(); + if (op instanceof StatementPattern) { + final StatementPattern sp = (StatementPattern) op; + final Value s = sp.getSubjectVar().getValue(); + final Value p = sp.getPredicateVar().getValue(); + final Value o = sp.getObjectVar().getValue(); + if (s == null && p != null && o != null && + BD.SEARCH.equals(p)) { + searchMetadata.put(sp.getSubjectVar(), + new LinkedHashSet<StatementPattern>()); + } + } + } + // do a second pass to get the search metadata + for (SOp sop : sopTree) { + final QueryModelNode op = sop.getOperator(); + if (op instanceof StatementPattern) { + final StatementPattern sp = (StatementPattern) op; + final Value s = sp.getSubjectVar().getValue(); + final Value p = sp.getPredicateVar().getValue(); + if (s == null && p != null && + (BD.RELEVANCE.equals(p) || BD.MAX_HITS.equals(p) || + BD.MIN_RELEVANCE.equals(p))) { + final Var sVar = sp.getSubjectVar(); + Set<StatementPattern> metadata = searchMetadata.get(sVar); + if (metadata != null) { + metadata.add(sp); + } + sopsToPrune.add(sop); + } + } + } + + /* + * Prunes the sop tree of search metadata. + */ + sopTree = stb.pruneSOps(sopTree, sopsToPrune); + sopsToPrune.clear(); + + /* * Iterate through the sop tree and translate statement patterns into * predicates. 
*/ @@ -622,8 +687,16 @@ final QueryModelNode op = sop.getOperator(); if (op instanceof StatementPattern) { final StatementPattern sp = (StatementPattern) op; + final Value p = sp.getPredicateVar().getValue(); try { - final IPredicate bop = toPredicate((StatementPattern) op); + final IPredicate bop; + if (p != null && BD.SEARCH.equals(p)) { + final Set<StatementPattern> metadata = + searchMetadata.get(sp.getSubjectVar()); + bop = toSearchPredicate(sp, metadata); + } else { + bop = toPredicate((StatementPattern) op); + } sop.setBOp(bop); } catch (UnrecognizedValueException ex) { /* @@ -657,12 +730,6 @@ final Collection<Filter> sesameFilters = new LinkedList<Filter>(); /* - * We need to prune Sesame filters that we cannot translate into native - * constraints (ones that require lexicon joins). - */ - final Collection<SOp> sopsToPrune = new LinkedList<SOp>(); - - /* * Iterate through the sop tree and translate Sesame ValueExpr operators * into bigdata IConstraint boperators. */ @@ -1842,6 +1909,118 @@ } + private IPredicate toSearchPredicate(final StatementPattern sp, + final Set<StatementPattern> metadata) + throws QueryEvaluationException { + + final Value predValue = sp.getPredicateVar().getValue(); + if (log.isDebugEnabled()) { + log.debug(predValue); + } + if (predValue == null || !BD.SEARCH.equals(predValue)) { + throw new IllegalArgumentException("not a valid magic search: " + sp); + } + final Value objValue = sp.getObjectVar().getValue(); + if (log.isDebugEnabled()) { + log.debug(objValue); + } + if (objValue == null || !(objValue instanceof Literal)) { + throw new IllegalArgumentException("not a valid magic search: " + sp); + } + + final Var subjVar = sp.getSubjectVar(); + + final IVariableOrConstant<IV> search = + com.bigdata.bop.Var.var(subjVar.getName()); + + IVariableOrConstant<IV> relevance = new Constant(DummyIV.INSTANCE); + Literal maxHits = null; + Literal minRelevance = null; + + for (StatementPattern meta : metadata) { + if (!meta.getSubjectVar().equals(subjVar)) { + throw new IllegalArgumentException("illegal metadata: " + meta); + } + final Value pVal = meta.getPredicateVar().getValue(); + final Var oVar = meta.getObjectVar(); + final Value oVal = oVar.getValue(); + if (pVal == null) { + throw new IllegalArgumentException("illegal metadata: " + meta); + } + if (BD.RELEVANCE.equals(pVal)) { + if (oVar.hasValue()) { + throw new IllegalArgumentException("illegal metadata: " + meta); + } + relevance = com.bigdata.bop.Var.var(oVar.getName()); + } else if (BD.MAX_HITS.equals(pVal)) { + if (oVal == null || !(oVal instanceof Literal)) { + throw new IllegalArgumentException("illegal metadata: " + meta); + } + maxHits = (Literal) oVal; + } else if (BD.MIN_RELEVANCE.equals(pVal)) { + if (oVal == null || !(oVal instanceof Literal)) { + throw new IllegalArgumentException("illegal metadata: " + meta); + } + minRelevance = (Literal) oVal; + } + } + + final IAccessPathExpander expander = + new FreeTextSearchExpander(database, (Literal) objValue, + maxHits, minRelevance); + + // Decide on the correct arity for the predicate. + final BOp[] vars = new BOp[] { + search, // s = searchVar + relevance, // p = relevanceVar + new Constant(DummyIV.INSTANCE), // o = reserved + new Constant(DummyIV.INSTANCE), // c = reserved + }; + + // The annotations for the predicate. 
+ final List<NV> anns = new LinkedList<NV>(); + + anns.add(new NV(IPredicate.Annotations.RELATION_NAME, + new String[] { database.getSPORelation().getNamespace() }));// + + // free text search expander or named graphs expander + if (expander != null) + anns.add(new NV(IPredicate.Annotations.ACCESS_PATH_EXPANDER, expander)); + + // timestamp + anns.add(new NV(Annotations.TIMESTAMP, database + .getSPORelation().getTimestamp())); + + /* + * Explicitly set the access path / iterator flags. + * + * Note: High level query generally permits iterator level parallelism. + * We set the PARALLEL flag here so it can be used if a global index + * view is chosen for the access path. + * + * Note: High level query for SPARQL always uses read-only access paths. + * If you are working with a SPARQL extension with UPDATE or INSERT INTO + * semantics then you will need to remote the READONLY flag for the + * mutable access paths. + */ + anns.add(new NV(IPredicate.Annotations.FLAGS, IRangeQuery.DEFAULT + | IRangeQuery.PARALLEL | IRangeQuery.READONLY)); + + return new SPOPredicate(vars, anns.toArray(new NV[anns.size()])); +// return new SPOPredicate( +// new String[] { database.getSPORelation().getNamespace() }, +// -1, // partitionId +// search, // s = searchVar +// relevance, // p = relevanceVar +// new Constant(DummyIV.INSTANCE), // o = reserved +// new Constant(DummyIV.INSTANCE), // c = reserved +// false, // optional +// null, // filter on elements visited by the access path. +// expander // free text search expander or named graphs expander +// ); + + } + /** * Takes a ValueExpression from a sesame Filter or LeftJoin and turns it * into a bigdata {@link IConstraint}. Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java 2011-01-22 21:13:16 UTC (rev 4163) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java 2011-01-25 16:01:15 UTC (rev 4164) @@ -16,6 +16,7 @@ import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.XSDDoubleIV; +import com.bigdata.rdf.lexicon.ITextIndexer; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.spo.ISPO; import com.bigdata.rdf.spo.SPO; @@ -138,14 +139,28 @@ if (hiterator == null) { assert database!=null; assert query != null; - if (database.getLexiconRelation().getSearchEngine() == null) + + final ITextIndexer textNdx = + database.getLexiconRelation().getSearchEngine(); + + if (textNdx == null) throw new UnsupportedOperationException( "No free text index?"); + // final long begin = System.nanoTime(); - hiterator = database.getLexiconRelation() - .getSearchEngine().search(query.getLabel(), + + String s = query.getLabel(); + final boolean prefixMatch; + if (s.indexOf('*') >= 0) { + prefixMatch = true; + s = s.replaceAll("\\*", ""); + } else { + prefixMatch = false; + } + + hiterator = textNdx.search(s, query.getLanguage(), - false/* prefixMatch */, + prefixMatch, minRelevance == null ? 0d : minRelevance.doubleValue()/* minCosine */, maxHits == null ? 
10000 : maxHits.intValue()+1/* maxRank */, 1000L/* timeout */, TimeUnit.MILLISECONDS); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java 2011-01-22 21:13:16 UTC (rev 4163) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java 2011-01-25 16:01:15 UTC (rev 4164) @@ -934,6 +934,138 @@ } + { // exact match + + final String searchQuery = "brown cow"; + final double minRelevance = 0.0d; + + final String query = + "select ?s ?o ?score " + + "where " + + "{ " + + " ?s <"+RDFS.LABEL+"> ?o . " + + " ?o <"+BD.SEARCH+"> \""+searchQuery+"\" . " + + " ?o <"+BD.RELEVANCE+"> ?score . " + +// " ?o <"+BD.MIN_RELEVANCE+"> \""+minRelevance+"\" . " + +// " ?o <"+BD.MAX_HITS+"> \"5\" . " + + " filter regex(?o, \""+searchQuery+"\") " + + "} " + + "order by desc(?score)"; + + log.info("\n"+query); + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(true /* includeInferred */); + TupleQueryResult result = tupleQuery.evaluate(); + + int i = 0; + while (result.hasNext()) { + log.info(i++ + ": " + result.next().toString()); + } + assertTrue("wrong # of results: " + i, i == 2); + + result = tupleQuery.evaluate(); + + Collection<BindingSet> answer = new LinkedList<BindingSet>(); + + final ITextIndexer search = + sail.getDatabase().getLexiconRelation().getSearchEngine(); + final Hiterator<IHit> hits = + search.search(searchQuery, + null, // languageCode + false, // prefixMatch + minRelevance, // minCosine + 10000, // maxRank (=maxResults + 1) + 1000L, // timeout + TimeUnit.MILLISECONDS // unit + ); + + while (hits.hasNext()) { + final IHit hit = hits.next(); + final IV id = new TermId(VTE.LITERAL, hit.getDocId()); + final Literal score = vf.createLiteral(hit.getCosine()); + final URI s = uris.get(id); + final Literal o = literals.get(id); + if (!o.getLabel().contains(searchQuery)) + continue; + final BindingSet bs = createBindingSet( + new BindingImpl("s", s), + new BindingImpl("o", o), + new BindingImpl("score", score)); + log.info(bs); + answer.add(bs); + } + + compare(result, answer); + + } + + { // prefix match + + final String searchQuery = "bro*"; + final double minRelevance = 0.0d; + + final String query = + "select ?s ?o ?score " + + "where " + + "{ " + + " ?s <"+RDFS.LABEL+"> ?o . " + + " ?o <"+BD.SEARCH+"> \""+searchQuery+"\" . " + + " ?o <"+BD.RELEVANCE+"> ?score . " + +// " ?o <"+BD.MIN_RELEVANCE+"> \""+minRelevance+"\" . " + +// " ?o <"+BD.MAX_HITS+"> \"5\" . 
" + +// " filter regex(?o, \""+searchQuery+"\") " + + "} " + + "order by desc(?score)"; + + log.info("\n"+query); + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(true /* includeInferred */); + TupleQueryResult result = tupleQuery.evaluate(); + + int i = 0; + while (result.hasNext()) { + log.info(i++ + ": " + result.next().toString()); + } + assertTrue("wrong # of results: " + i, i == 3); + + result = tupleQuery.evaluate(); + + Collection<BindingSet> answer = new LinkedList<BindingSet>(); + + final ITextIndexer search = + sail.getDatabase().getLexiconRelation().getSearchEngine(); + final Hiterator<IHit> hits = + search.search(searchQuery, + null, // languageCode + true, // prefixMatch + minRelevance, // minCosine + 10000, // maxRank (=maxResults + 1) + 1000L, // timeout + TimeUnit.MILLISECONDS // unit + ); + + while (hits.hasNext()) { + final IHit hit = hits.next(); + final IV id = new TermId(VTE.LITERAL, hit.getDocId()); + final Literal score = vf.createLiteral(hit.getCosine()); + final URI s = uris.get(id); + final Literal o = literals.get(id); + final BindingSet bs = createBindingSet( + new BindingImpl("s", s), + new BindingImpl("o", o), + new BindingImpl("score", score)); + log.info(bs); + answer.add(bs); + } + + compare(result, answer); + + } + } finally { cxn.close(); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2011-02-07 20:30:50
Revision: 4181 http://bigdata.svn.sourceforge.net/bigdata/?rev=4181&view=rev Author: mrpersonick Date: 2011-02-07 20:30:43 +0000 (Mon, 07 Feb 2011) Log Message: ----------- fixing a problem with nested unions Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTree.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-02-07 16:46:53 UTC (rev 4180) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-02-07 20:30:43 UTC (rev 4181) @@ -349,6 +349,10 @@ new NV(SliceOp.Annotations.EVALUATION_CONTEXT, BOpEvaluationContext.CONTROLLER),// })),queryHints); + + if (rule.getTailCount() == 0) { + return startOp; + } /* * First put the tails in the correct order based on the logic in @@ -569,12 +573,10 @@ for (int i = 0; i < order.length; i++) { - final int joinId = idFactory.incrementAndGet(); - // assign a bop id to the predicate Predicate<?> pred = (Predicate<?>) rule.getTail(order[i]).setBOpId( idFactory.incrementAndGet()); - + /* * Decorate the predicate with the assigned index (this is purely * informative). @@ -585,7 +587,7 @@ Annotations.ORIGINAL_INDEX, keyOrder[order[i]]); } - // decorate the predicate with the cardinality estimate. + // decorate the predicate with the cardinality estimate. if (cardinality != null) { pred = (Predicate<?>) pred.setProperty( Annotations.ESTIMATED_CARDINALITY, @@ -628,107 +630,9 @@ } } - // annotations for this join. - final List<NV> anns = new LinkedList<NV>(); - - anns.add(new NV(BOp.Annotations.BOP_ID, joinId)); + left = join(queryEngine, left, pred, constraints, context, + idFactory, queryHints); -// anns.add(new NV(PipelineJoin.Annotations.SELECT, -// selectVars[order[i]])); - - // No. The join just looks at the Predicate's optional annotation. -// if (pred.isOptional()) -// anns.add(new NV(PipelineJoin.Annotations.OPTIONAL, pred -// .isOptional())); - - /* - * Pull off annotations before we clear them from the predicate. - */ - final Scope scope = (Scope) pred.getProperty(Annotations.SCOPE); - - // true iff this is a quads access path. - final boolean quads = pred.getProperty(Annotations.QUADS, - Annotations.DEFAULT_QUADS); - - // pull of the Sesame dataset before we strip the annotations. - final Dataset dataset = (Dataset) pred - .getProperty(Annotations.DATASET); - - // strip off annotations that we do not want to propagate. - pred = pred.clearAnnotations(new String[] { Annotations.SCOPE, - Annotations.QUADS, Annotations.DATASET }); - - if (!constraints.isEmpty()) { -// // decorate the predicate with any constraints. -// pred = (Predicate<?>) pred.setProperty( -// IPredicate.Annotations.CONSTRAINTS, constraints -// .toArray(new IConstraint[constraints.size()])); - // add constraints to the join for that predicate. 
- anns.add(new NV(PipelineJoin.Annotations.CONSTRAINTS, - constraints - .toArray(new IConstraint[constraints.size()]))); - - } - - if (quads) { - - /* - * Quads mode. - */ - - if (enableDecisionTree) { - /* - * Strip off the named graph or default graph expander (in - * the long term it will simply not be generated.) - */ - pred = pred - .clearAnnotations(new String[] { IPredicate.Annotations.ACCESS_PATH_EXPANDER }); - - switch (scope) { - case NAMED_CONTEXTS: - left = namedGraphJoin(queryEngine, context, idFactory, - left, anns, pred, dataset, queryHints); - break; - case DEFAULT_CONTEXTS: - left = defaultGraphJoin(queryEngine, context, idFactory, - left, anns, pred, dataset, queryHints); - break; - default: - throw new AssertionError(); - } - - } else { - - /* - * This is basically the old way of handling quads query - * using expanders which were attached by - * BigdataEvaluationStrategyImpl. - */ - - final boolean scaleOut = queryEngine.isScaleOut(); - if (scaleOut) - throw new UnsupportedOperationException(); - - anns.add(new NV(Predicate.Annotations.EVALUATION_CONTEXT, - BOpEvaluationContext.ANY)); - - anns.add(new NV(PipelineJoin.Annotations.PREDICATE,pred)); - - left = applyQueryHints(new PipelineJoin(new BOp[] { left }, - anns.toArray(new NV[anns.size()])), queryHints); - - } - - } else { - - /* - * Triples or provenance mode. - */ - - left = triplesModeJoin(queryEngine, left, anns, pred, queryHints); - - } - } // if (rule.getConstraintCount() > 0) { @@ -758,7 +662,130 @@ return left; } + + public static PipelineOp join(final QueryEngine queryEngine, + PipelineOp left, Predicate pred, final AtomicInteger idFactory, + final Properties queryHints) { + + return join(queryEngine, left, pred, null, + new BOpContextBase(queryEngine), idFactory, queryHints); + + } + + public static PipelineOp join(final QueryEngine queryEngine, + PipelineOp left, Predicate pred, + final Collection<IConstraint> constraints, + final BOpContextBase context, final AtomicInteger idFactory, + final Properties queryHints) { + + final int joinId = idFactory.incrementAndGet(); + + // annotations for this join. + final List<NV> anns = new LinkedList<NV>(); + + anns.add(new NV(BOp.Annotations.BOP_ID, joinId)); +// anns.add(new NV(PipelineJoin.Annotations.SELECT, +// selectVars[order[i]])); + + // No. The join just looks at the Predicate's optional annotation. +// if (pred.isOptional()) +// anns.add(new NV(PipelineJoin.Annotations.OPTIONAL, pred +// .isOptional())); + + if (constraints != null && !constraints.isEmpty()) { +// // decorate the predicate with any constraints. +// pred = (Predicate<?>) pred.setProperty( +// IPredicate.Annotations.CONSTRAINTS, constraints +// .toArray(new IConstraint[constraints.size()])); + + // add constraints to the join for that predicate. + anns.add(new NV( + PipelineJoin.Annotations.CONSTRAINTS, + constraints.toArray(new IConstraint[constraints.size()]))); + + } + + /* + * Pull off annotations before we clear them from the predicate. + */ + final Scope scope = (Scope) pred.getProperty(Annotations.SCOPE); + + // true iff this is a quads access path. + final boolean quads = pred.getProperty(Annotations.QUADS, + Annotations.DEFAULT_QUADS); + + // pull of the Sesame dataset before we strip the annotations. + final Dataset dataset = (Dataset) pred + .getProperty(Annotations.DATASET); + + // strip off annotations that we do not want to propagate. 
+ pred = pred.clearAnnotations(new String[] { Annotations.SCOPE, + Annotations.QUADS, Annotations.DATASET }); + + if (quads) { + + /* + * Quads mode. + */ + + if (enableDecisionTree) { + /* + * Strip off the named graph or default graph expander (in + * the long term it will simply not be generated.) + */ + pred = pred + .clearAnnotations(new String[] { IPredicate.Annotations.ACCESS_PATH_EXPANDER }); + + switch (scope) { + case NAMED_CONTEXTS: + left = namedGraphJoin(queryEngine, context, idFactory, + left, anns, pred, dataset, queryHints); + break; + case DEFAULT_CONTEXTS: + left = defaultGraphJoin(queryEngine, context, idFactory, + left, anns, pred, dataset, queryHints); + break; + default: + throw new AssertionError(); + } + + } else { + + /* + * This is basically the old way of handling quads query + * using expanders which were attached by + * BigdataEvaluationStrategyImpl. + */ + + final boolean scaleOut = queryEngine.isScaleOut(); + if (scaleOut) + throw new UnsupportedOperationException(); + + anns.add(new NV(Predicate.Annotations.EVALUATION_CONTEXT, + BOpEvaluationContext.ANY)); + + anns.add(new NV(PipelineJoin.Annotations.PREDICATE,pred)); + + left = applyQueryHints(new PipelineJoin(new BOp[] { left }, + anns.toArray(new NV[anns.size()])), queryHints); + + } + + } else { + + /* + * Triples or provenance mode. + */ + + left = triplesModeJoin(queryEngine, left, anns, pred, queryHints); + + } + + return left; + + } + /** * Generate a {@link PipelineJoin} for a triples mode access path. * Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-02-07 16:46:53 UTC (rev 4180) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-02-07 20:30:43 UTC (rev 4181) @@ -40,6 +40,7 @@ import org.openrdf.query.algebra.StatementPattern; import com.bigdata.bop.BOp; +import com.bigdata.bop.BOpContextBase; import com.bigdata.bop.BOpEvaluationContext; import com.bigdata.bop.BOpUtility; import com.bigdata.bop.IConstraint; @@ -132,7 +133,8 @@ private static boolean isNonOptionalJoinGroup(final SOpGroup sopGroup) { - return !(isUnion(sopGroup) || isOptional(sopGroup)); + return sopGroup.size() > 0 && + !(isUnion(sopGroup) || isOptional(sopGroup)); } @@ -184,14 +186,37 @@ * Start with left=<this join group> and add a SubqueryOp for each * sub group. 
*/ +// final SOpGroups children = join.getChildren(); +// if (children != null) { +// for (SOpGroup child : children) { +// if (isSingleOptional(child)) { +// // handled by the rule() conversion above +// continue; +// } +// final PipelineOp subquery = convert( +// child, idFactory, db, queryEngine, queryHints); +// final boolean optional = isOptional(child); +// final int subqueryId = idFactory.incrementAndGet(); +// left = new SubqueryOp(new BOp[]{left}, +// new NV(Predicate.Annotations.BOP_ID, subqueryId),// +// new NV(SubqueryOp.Annotations.SUBQUERY, subquery),// +// new NV(SubqueryOp.Annotations.OPTIONAL,optional)// +// ); +// if (log.isInfoEnabled()) { +// log.info("adding a subquery: " + subqueryId + "\n" + left); +// } +// } +// } final SOpGroups children = join.getChildren(); if (children != null) { - for (SOpGroup child : join.getChildren()) { - if (isSingleOptional(child)) { - // handled by the rule() conversion above + /* + * First do the non-optional subqueries (UNIONs) + */ + for (SOpGroup child : children) { + if (!isUnion(child)) continue; - } + final PipelineOp subquery = convert( child, idFactory, db, queryEngine, queryHints); final boolean optional = isOptional(child); @@ -205,6 +230,39 @@ log.info("adding a subquery: " + subqueryId + "\n" + left); } } + + /* + * Next do the optional subqueries and optional tails + */ + for (SOpGroup child : children) { + if (isUnion(child)) + continue; + + if (isSingleOptional(child)) { + final SOp sop = child.getSingletonSOp(); + final BOp bop = sop.getBOp(); + Predicate pred = (Predicate) bop.setProperty( + IPredicate.Annotations.OPTIONAL, Boolean.TRUE); + pred = pred.setBOpId(idFactory.incrementAndGet()); + left = Rule2BOpUtility.join( + queryEngine, left, pred, + idFactory, + queryHints); + } else { + final PipelineOp subquery = convert( + child, idFactory, db, queryEngine, queryHints); + final boolean optional = isOptional(child); + final int subqueryId = idFactory.incrementAndGet(); + left = new SubqueryOp(new BOp[]{left}, + new NV(Predicate.Annotations.BOP_ID, subqueryId),// + new NV(SubqueryOp.Annotations.SUBQUERY, subquery),// + new NV(SubqueryOp.Annotations.OPTIONAL,optional)// + ); + if (log.isInfoEnabled()) { + log.info("adding a subquery: " + subqueryId + "\n" + left); + } + } + } } for (IConstraint c : postConditionals) { @@ -313,23 +371,23 @@ } } - /* - * The way that the Sesame operator tree is parsed, optional tails - * become single-operator (predicate) join groups without any children - * of their own. - */ - final SOpGroups children = group.getChildren(); - if (children != null) { - for (SOpGroup child : group.getChildren()) { - if (isSingleOptional(child)) { - final SOp sop = child.getSingletonSOp(); - final BOp bop = sop.getBOp(); - final IPredicate pred = (IPredicate) bop.setProperty( - IPredicate.Annotations.OPTIONAL, Boolean.TRUE); - preds.add(pred); - } - } - } +// /* +// * The way that the Sesame operator tree is parsed, optional tails +// * become single-operator (predicate) join groups without any children +// * of their own. 
+// */ +// final SOpGroups children = group.getChildren(); +// if (children != null) { +// for (SOpGroup child : children) { +// if (isSingleOptional(child)) { +// final SOp sop = child.getSingletonSOp(); +// final BOp bop = sop.getBOp(); +// final IPredicate pred = (IPredicate) bop.setProperty( +// IPredicate.Annotations.OPTIONAL, Boolean.TRUE); +// preds.add(pred); +// } +// } +// } /* * Gather up all the variables used by predicates in this group Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTree.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTree.java 2011-02-07 16:46:53 UTC (rev 4180) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTree.java 2011-02-07 20:30:43 UTC (rev 4181) @@ -43,10 +43,14 @@ } group.add(sop); } + if (!groups.containsKey(0)) { + // need a dummy root group + groups.put(0, new LinkedList<SOp>()); + } for (Integer g : groups.keySet()) { final List<SOp> group = groups.get(g); - final int pg = group.get(0).getParentGroup(); + final int pg = group.isEmpty() ? -1 : group.get(0).getParentGroup(); allGroups.put(g, new SOpGroup(g, pg, group)); } @@ -195,7 +199,10 @@ } sb.append("SOp -> parent:").append(nl); for (Map.Entry<Integer, SOpGroup> e : this.parents.entrySet()) { - sb.append(e.getKey() + " -> " + e.getValue().getGroup()).append(nl); + sb.append(e.getKey()); + sb.append(" -> "); + sb.append(e.getValue() == null ? "null" : e.getValue().getGroup()); + sb.append(nl); } sb.append("SOp -> children:").append(nl); for (Map.Entry<Integer, SOpGroups> e : this.children.entrySet()) { @@ -206,7 +213,9 @@ } sb2.setLength(sb2.length()-2); sb.append(e.getKey() + " -> {" + sb2.toString() + "}"); + sb.append(nl); } + sb.setLength(sb.length()-1); return sb.toString(); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java 2011-02-07 16:46:53 UTC (rev 4180) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOpTreeBuilder.java 2011-02-07 20:30:43 UTC (rev 4181) @@ -121,6 +121,8 @@ collectSOps(sops, (LeftJoin) left, rslj, g, pg); } else if (left instanceof SingletonSet){ // do nothing + } else if (left instanceof Union){ + collectSOps(sops, (Union) left, rslj, groupId.incrementAndGet(), g); } else { throw new UnsupportedOperatorException(left); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java 2011-02-07 16:46:53 UTC (rev 4180) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java 2011-02-07 20:30:43 UTC (rev 4181) @@ -28,14 +28,12 @@ import java.util.Collection; import java.util.LinkedList; -import java.util.Map; import java.util.Properties; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.log4j.Logger; -import org.openrdf.model.Literal; import org.openrdf.model.URI; import org.openrdf.model.ValueFactory; +import org.openrdf.model.impl.LiteralImpl; import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; import org.openrdf.query.BindingSet; @@ -48,17 
+46,9 @@ import org.openrdf.repository.RepositoryConnection; import org.openrdf.repository.sail.SailTupleQuery; -import com.bigdata.bop.BOpUtility; -import com.bigdata.bop.PipelineOp; -import com.bigdata.bop.engine.QueryEngine; import com.bigdata.rdf.axioms.NoAxioms; -import com.bigdata.rdf.sail.sop.SOp; -import com.bigdata.rdf.sail.sop.SOp2BOpUtility; import com.bigdata.rdf.sail.sop.SOpTree; -import com.bigdata.rdf.sail.sop.SOpTree.SOpGroup; -import com.bigdata.rdf.sail.sop.SOpTree.SOpGroups; import com.bigdata.rdf.sail.sop.SOpTreeBuilder; -import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.rdf.store.BD; import com.bigdata.rdf.vocab.NoVocabulary; @@ -229,4 +219,118 @@ } + public void testNestedUnionWithOptionals() throws Exception { + +// final Sail sail = new MemoryStore(); +// sail.initialize(); +// final Repository repo = new SailRepository(sail); + + final BigdataSail sail = getSail(); + try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + + final RepositoryConnection cxn = repo.getConnection(); + + try { + + cxn.setAutoCommit(false); + + final ValueFactory vf = sail.getValueFactory(); + + /* + * Create some terms. + */ + final URI john = vf.createURI(BD.NAMESPACE + "john"); + final URI mary = vf.createURI(BD.NAMESPACE + "mary"); + final URI leon = vf.createURI(BD.NAMESPACE + "leon"); + final URI paul = vf.createURI(BD.NAMESPACE + "paul"); + final URI brad = vf.createURI(BD.NAMESPACE + "brad"); + final URI fred = vf.createURI(BD.NAMESPACE + "fred"); + final URI knows = vf.createURI(BD.NAMESPACE + "knows"); + + /* + * Create some statements. + */ + cxn.add(mary, knows, fred); + cxn.add(john, knows, leon); + cxn.add(john, RDFS.LABEL, vf.createLiteral("John")); + cxn.add(mary, RDF.TYPE, RDFS.RESOURCE); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any + * operations that go around the sail. + */ + cxn.commit(); + + { + + String query = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "select * " + + "where { " + + " { " + + " ?a bd:knows bd:fred . " + + " } UNION { " + + " ?a bd:knows bd:leon . 
" + + " } " + + " OPTIONAL { ?a rdf:type ?type } " + + " OPTIONAL { ?a rdfs:label ?label } " + + "}"; + + final SailTupleQuery tupleQuery = (SailTupleQuery) + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(false /* includeInferred */); + + if (log.isInfoEnabled()) { + + final BigdataSailTupleQuery bdTupleQuery = + (BigdataSailTupleQuery) tupleQuery; + final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); + final Projection p = (Projection) root.getArg(); + final TupleExpr tupleExpr = p.getArg(); + + log.info(tupleExpr); + + final SOpTreeBuilder stb = new SOpTreeBuilder(); + final SOpTree tree = stb.collectSOps(tupleExpr); + + log.info(tree); + log.info(query); + + final TupleQueryResult result = tupleQuery.evaluate(); + log.info("results:"); + while (result.hasNext()) { + log.info(result.next()); + } + + } + + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("a", john), + new BindingImpl("label", vf.createLiteral("John")) + )); + answer.add(createBindingSet( + new BindingImpl("a", mary), + new BindingImpl("type", RDFS.RESOURCE) + )); + + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + + } + + } finally { + cxn.close(); + } + } finally { + sail.__tearDownUnitTest();//shutDown(); + } + + } + } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2011-02-21 17:12:15
Revision: 4215 http://bigdata.svn.sourceforge.net/bigdata/?rev=4215&view=rev Author: mrpersonick Date: 2011-02-21 17:12:09 +0000 (Mon, 21 Feb 2011) Log Message: ----------- better handling of unrecognized values in queries Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEvaluationStrategyImpl.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/union.ttl Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-02-21 14:30:25 UTC (rev 4214) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-02-21 17:12:09 UTC (rev 4215) @@ -703,13 +703,14 @@ * If we encounter a value not in the lexicon, we can * still continue with the query if the value is in * either an optional tail or an optional join group (i.e. - * if it appears on the right side of a LeftJoin). + * if it appears on the right side of a LeftJoin). We can + * also continue if the value is in a UNION. * Otherwise we can stop evaluating right now. */ - if (sop.isRightSideLeftJoin()) { + if (sop.getGroup() == SOpTreeBuilder.ROOT_GROUP_ID) { + throw new UnrecognizedValueException(ex); + } else { groupsToPrune.add(sopTree.getGroup(sop.getGroup())); - } else { - throw new UnrecognizedValueException(ex); } } } @@ -720,6 +721,15 @@ * not in the lexicon. */ sopTree = stb.pruneGroups(sopTree, groupsToPrune); + + /* + * If after pruning groups with unrecognized values we end up with a + * UNION with no subqueries, we can safely just return an empty + * iteration. + */ + if (SOp2BOpUtility.isEmptyUnion(sopTree.getRoot())) { + return new EmptyIteration<BindingSet, QueryEvaluationException>(); + } /* * If we have a filter in the root group (one that can be safely applied @@ -2047,9 +2057,33 @@ } private IConstraint toConstraint(Or or) { - final IConstraint right = toConstraint(or.getRightArg()); - final IConstraint left = toConstraint(or.getLeftArg()); - return new OR(left, right); + IConstraint left = null, right = null; + UnrecognizedValueException uve = null; + try { + left = toConstraint(or.getLeftArg()); + } catch (UnrecognizedValueException ex) { + uve = ex; + } + try { + right = toConstraint(or.getRightArg()); + } catch (UnrecognizedValueException ex) { + uve = ex; + } + + /* + * if both sides contain unrecognized values, then we need to throw + * the exception up. but if only one does, then we can still handle it + * since we are doing an OR. + */ + if (left == null && right == null) { + throw uve; + } + + if (left != null && right != null) { + return new OR(left, right); + } else { + return left != null ? 
left : right; + } } private IConstraint toConstraint(And and) { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-02-21 14:30:25 UTC (rev 4214) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-02-21 17:12:09 UTC (rev 4215) @@ -105,6 +105,18 @@ } + public static boolean isEmptyUnion(final SOpGroup sopGroup) { + + if (isUnion(sopGroup)) { + final SOpGroups children = sopGroup.getChildren(); + if (children == null || children.size() == 0) { + return true; + } + } + return false; + + } + /** * Because of the way we parse the Sesame operator tree, the single * optional tails get placed in their own singleton subgroup without any @@ -214,10 +226,10 @@ * First do the non-optional subqueries (UNIONs) */ for (SOpGroup child : children) { - if (!isUnion(child)) + if (!isUnion(child) || isEmptyUnion(child)) continue; - final PipelineOp subquery = convert( + final PipelineOp subquery = union( child, idFactory, db, queryEngine, queryHints); final boolean optional = isOptional(child); final int subqueryId = idFactory.incrementAndGet(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEvaluationStrategyImpl.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEvaluationStrategyImpl.java 2011-02-21 14:30:25 UTC (rev 4214) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEvaluationStrategyImpl.java 2011-02-21 17:12:09 UTC (rev 4215) @@ -118,7 +118,7 @@ " ?p = <"+property1+">) " + "}"; - { // evalute it once so i can see it + { // evaluate it once so i can see it final StringWriter sw = new StringWriter(); final SPARQLResultsXMLWriter handler = new SPARQLResultsXMLWriter( @@ -891,6 +891,29 @@ } + public void test_join_combo_3() throws Exception { + + // define the vocabulary + + // define the graph + Graph graph = new GraphImpl(); + + // define the query + String query = + "PREFIX : <http://example/> " + + "SELECT * " + + "{ " + + " { ?x :p ?y } UNION { ?p a ?z } " + + "}"; + + // define the correct answer + Collection<BindingSet> answer = new LinkedList<BindingSet>(); + + // run the test + runQuery(graph, query, answer); + + } + public void test_two_nested_opt() throws Exception { // define the vocabulary Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java 2011-02-21 14:30:25 UTC (rev 4214) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestNestedUnions.java 2011-02-21 17:12:09 UTC (rev 4215) @@ -39,12 +39,18 @@ import org.openrdf.query.BindingSet; import org.openrdf.query.QueryLanguage; import org.openrdf.query.TupleQueryResult; +import org.openrdf.query.algebra.Distinct; import org.openrdf.query.algebra.Projection; import org.openrdf.query.algebra.QueryRoot; import org.openrdf.query.algebra.TupleExpr; import org.openrdf.query.impl.BindingImpl; +import org.openrdf.repository.Repository; import org.openrdf.repository.RepositoryConnection; +import 
org.openrdf.repository.sail.SailRepository; import org.openrdf.repository.sail.SailTupleQuery; +import org.openrdf.rio.RDFFormat; +import org.openrdf.sail.Sail; +import org.openrdf.sail.memory.MemoryStore; import com.bigdata.rdf.axioms.NoAxioms; import com.bigdata.rdf.sail.sop.SOpTree; @@ -332,5 +338,109 @@ } } + + public void testForumBug() throws Exception { + +// final Sail sail = new MemoryStore(); + final Sail sail = getSail(); + + try { + + sail.initialize(); + final Repository repo = sail instanceof BigdataSail ? + new BigdataSailRepository((BigdataSail)sail) : + new SailRepository(sail); + final RepositoryConnection cxn = repo.getConnection(); + + try { + + final ValueFactory vf = sail.getValueFactory(); + + cxn.setAutoCommit(false); + + /* + * load the data + */ + cxn.add(getClass().getResourceAsStream("union.ttl"),"",RDFFormat.TURTLE); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any + * operations that go around the sail. + */ + cxn.commit(); + + { + + String query = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "SELECT DISTINCT ?neType ?majorType ?minorType " + + "WHERE { " + + " { " + + " ?neType <http://www.w3.org/2000/01/rdf-schema#subClassOf> <http://example/class/NamedEntity> . " + + " FILTER(?neType != <http://example/class/NamedEntity>) " + + " } " + + " UNION " + + " { ?lookup <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://example/class/Lookup> . " + + " ?lookup <http://example/prop/lookup/majorType> ?majorType . " + + " OPTIONAL { ?lookup <http://example/prop/lookup/minorType> ?minorType } " + + " } " + + "}"; + + final SailTupleQuery tupleQuery = (SailTupleQuery) + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(false /* includeInferred */); + + if (sail instanceof BigdataSail && log.isInfoEnabled()) { + + final BigdataSailTupleQuery bdTupleQuery = + (BigdataSailTupleQuery) tupleQuery; + final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); + final Distinct d = (Distinct) root.getArg(); + final Projection p = (Projection) d.getArg(); + final TupleExpr tupleExpr = p.getArg(); + log.info(tupleExpr); + + final SOpTreeBuilder stb = new SOpTreeBuilder(); + final SOpTree tree = stb.collectSOps(tupleExpr); + + log.info(tree); + log.info(query); + + } + + if (log.isInfoEnabled()) { + final TupleQueryResult result = tupleQuery.evaluate(); + log.info("results:"); + while (result.hasNext()) { + log.info(result.next()); + } + } + + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("neType", vf.createURI("http://example/class/Location")) + )); + answer.add(createBindingSet( + new BindingImpl("neType", vf.createURI("http://example/class/Person")) + )); + + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + + } + + } finally { + cxn.close(); + } + } finally { + if (sail instanceof BigdataSail) + ((BigdataSail)sail).__tearDownUnitTest();//shutDown(); + } + + } + } Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/union.ttl =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/union.ttl (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/union.ttl 2011-02-21 17:12:09 UTC (rev 4215) @@ -0,0 +1,21 @@ +@prefix rdfs: 
<http://www.w3.org/2000/01/rdf-schema#> . +@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> . + +<http://example/class/Location> rdfs:subClassOf <http://example/class/NamedEntity> . +<http://example/class/Location> rdfs:label "Location" . +<http://example/class/NamedEntity> rdfs:label "NamedEntity" . +<http://example/elem/loc/loc1> rdfs:label "label: Amsterdam" . +<http://example/elem/loc/loc1> rdf:type <http://example/class/Location> . +<http://example/elem/loc/loc1> <http://example/prop/lookupName> "amsterdam" . +<http://example/elem/loc/loc2> rdfs:label "label: Den Haag" . +<http://example/elem/loc/loc2> rdf:type <http://example/class/Location> . +<http://example/elem/loc/loc2> <http://example/prop/lookupName> "den haag" . +<http://example/elem/loc/loc3> rdfs:label "label: IJmuiden" . +<http://example/elem/loc/loc3> rdf:type <http://example/class/Location> . +<http://example/elem/loc/loc3> <http://example/prop/lookupName> "ijmuiden" . +<http://example/elem/loc/loc3> <http://example/prop/disabled> "true"^^<http://www.w3.org/2001/XMLSchema#boolean> . +<http://example/class/Person> rdfs:subClassOf <http://example/class/NamedEntity> . +<http://example/class/Person> rdfs:label "Person" . +<http://example/elem/person/puk> rdfs:label "label: Puk van de Petteflet" . +<http://example/elem/person/puk> rdf:type <http://example/class/Person> . +<http://example/elem/person/puk> <http://example/prop/lookupName> "puk van de petteflet" . This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
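At the query level, the effect of this revision is that a URI unknown to the lexicon is only fatal when it occurs in the root group; in an OPTIONAL or UNION branch the enclosing group is pruned and the rest of the query still runs. A minimal sketch, assuming an open RepositoryConnection cxn over the union.ttl data above; <http://example/never/loaded> is a hypothetical URI that is deliberately absent from the store.

    final String query =
        "SELECT ?s WHERE { " +
        "  { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> " +
        "       <http://example/class/Location> } " +
        "  UNION " +
        "  { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> " +
        "       <http://example/never/loaded> } " +
        "}";

    // Previously the unresolvable URI aborted evaluation with an
    // UnrecognizedValueException; now the dead branch is pruned and the
    // three Location instances come back. Had every branch been pruned,
    // an empty iteration would be returned instead (see isEmptyUnion above).
    final TupleQueryResult result =
        cxn.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate();
    while (result.hasNext()) {
        System.out.println(result.next());
    }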
From: <mrp...@us...> - 2011-02-22 20:58:01
Revision: 4225 http://bigdata.svn.sourceforge.net/bigdata/?rev=4225&view=rev Author: mrpersonick Date: 2011-02-22 20:57:55 +0000 (Tue, 22 Feb 2011) Log Message: ----------- isLiteral support Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestInlineValues.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-02-22 20:56:49 UTC (rev 4224) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-02-22 20:57:55 UTC (rev 4225) @@ -21,6 +21,7 @@ import org.openrdf.model.Literal; import org.openrdf.model.URI; import org.openrdf.model.Value; +import org.openrdf.model.impl.BooleanLiteralImpl; import org.openrdf.query.BindingSet; import org.openrdf.query.Dataset; import org.openrdf.query.QueryEvaluationException; @@ -29,6 +30,7 @@ import org.openrdf.query.algebra.Compare; import org.openrdf.query.algebra.Filter; import org.openrdf.query.algebra.Group; +import org.openrdf.query.algebra.IsLiteral; import org.openrdf.query.algebra.Join; import org.openrdf.query.algebra.LeftJoin; import org.openrdf.query.algebra.MathExpr; @@ -84,10 +86,12 @@ import com.bigdata.btree.keys.IKeyBuilderFactory; import com.bigdata.rdf.internal.DummyIV; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.XSDBooleanIV; import com.bigdata.rdf.internal.constraints.AndBOp; import com.bigdata.rdf.internal.constraints.CompareBOp; import com.bigdata.rdf.internal.constraints.EBVBOp; import com.bigdata.rdf.internal.constraints.IsBoundBOp; +import com.bigdata.rdf.internal.constraints.IsLiteralBOp; import com.bigdata.rdf.internal.constraints.MathBOp; import com.bigdata.rdf.internal.constraints.NotBOp; import com.bigdata.rdf.internal.constraints.OrBOp; @@ -2059,6 +2063,7 @@ */ private IValueExpression<IV> toVE(final ValueExpr ve) throws UnsupportedOperatorException { + if (ve instanceof Var) { return toVE((Var) ve); } else if (ve instanceof ValueConstant) { @@ -2077,6 +2082,8 @@ return toVE((Compare) ve); } else if (ve instanceof Bound) { return toVE((Bound) ve); + } else if (ve instanceof IsLiteral) { + return toVE((IsLiteral) ve); } throw new UnsupportedOperatorException(ve); @@ -2158,6 +2165,11 @@ return new IsBoundBOp(var); } + private IValueExpression<IV> toVE(final IsLiteral isLiteral) { + final IVariable<IV> var = (IVariable<IV>) toVE(isLiteral.getArg()); + return new IsLiteralBOp(var); + } + /** * Generate a bigdata term from a Sesame term. * <p> @@ -2189,7 +2201,14 @@ * value does not exist in the lexicon. 
*/ private IConstant<IV> toVE(final ValueConstant vc) { - final IV iv = ((BigdataValue) vc.getValue()).getIV(); + final IV iv; + final Value v = vc.getValue(); + if (v instanceof BooleanLiteralImpl) { + final BooleanLiteralImpl bl = (BooleanLiteralImpl) v; + iv = XSDBooleanIV.valueOf(bl.booleanValue()); + } else { + iv = ((BigdataValue) vc.getValue()).getIV(); + } if (iv == null) throw new UnrecognizedValueException(vc.getValue()); return new Constant<IV>(iv); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestInlineValues.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestInlineValues.java 2011-02-22 20:56:49 UTC (rev 4224) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestInlineValues.java 2011-02-22 20:57:55 UTC (rev 4225) @@ -29,6 +29,8 @@ import java.util.Collection; import java.util.LinkedList; import java.util.Properties; + +import org.apache.log4j.Logger; import org.openrdf.model.Literal; import org.openrdf.model.URI; import org.openrdf.model.ValueFactory; @@ -48,6 +50,8 @@ */ public class TestInlineValues extends ProxyBigdataSailTestCase { + protected static final Logger log = Logger.getLogger(TestInlineValues.class); + @Override public Properties getProperties() { @@ -210,4 +214,127 @@ } + public void testIsLiteral() throws Exception { + + final BigdataSail sail = getSail(); + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + final BigdataSailRepositoryConnection cxn = + (BigdataSailRepositoryConnection) repo.getConnection(); + cxn.setAutoCommit(false); + + try { + + final ValueFactory vf = sail.getValueFactory(); + + URI A = vf.createURI("_:A"); + URI B = vf.createURI("_:B"); + URI X = vf.createURI("_:X"); + URI AGE = vf.createURI("_:AGE"); + Literal _25 = vf.createLiteral(25); + Literal _45 = vf.createLiteral(45); + + cxn.add(A, RDF.TYPE, X); + cxn.add(B, RDF.TYPE, X); + cxn.add(A, AGE, _25); + cxn.add(B, AGE, _45); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.flush();//commit(); + + if (log.isInfoEnabled()) { + log.info("\n" + sail.getDatabase().dumpStore()); + } + + { + + String query = + "select ?s ?age " + + "WHERE { " + + " ?s <"+RDF.TYPE+"> <"+X+"> . " + + " ?s <"+AGE+"> ?age . " + + " FILTER( isLiteral(?age) ) . " + + "}"; + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + if (log.isInfoEnabled()) { + final TupleQueryResult result = tupleQuery.evaluate(); + log.info("results:"); + if (!result.hasNext()) { + log.info("no results."); + } + while (result.hasNext()) { + log.info(result.next()); + } + } + + final TupleQueryResult result = tupleQuery.evaluate(); + + Collection<BindingSet> solution = new LinkedList<BindingSet>(); + solution.add(createBindingSet(new Binding[] { + new BindingImpl("s", A), + new BindingImpl("age", _25) + })); + solution.add(createBindingSet(new Binding[] { + new BindingImpl("s", B), + new BindingImpl("age", _45) + })); + + compare(result, solution); + + } + + { + + String query = + "select ?s ?age " + + "WHERE { " + + " ?s <"+RDF.TYPE+"> <"+X+"> . " + + " ?s <"+AGE+"> ?age . " + + " FILTER( isLiteral("+_25.toString()+") ) . 
" + + "}"; + + final TupleQuery tupleQuery = + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + + if (log.isInfoEnabled()) { + final TupleQueryResult result = tupleQuery.evaluate(); + log.info("results:"); + if (!result.hasNext()) { + log.info("no results."); + } + while (result.hasNext()) { + log.info(result.next()); + } + } + + final TupleQueryResult result = tupleQuery.evaluate(); + + Collection<BindingSet> solution = new LinkedList<BindingSet>(); + solution.add(createBindingSet(new Binding[] { + new BindingImpl("s", A), + new BindingImpl("age", _25) + })); + solution.add(createBindingSet(new Binding[] { + new BindingImpl("s", B), + new BindingImpl("age", _45) + })); + + compare(result, solution); + + } + + } finally { + cxn.close(); + sail.__tearDownUnitTest(); + } + + } + } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2011-02-24 23:10:49
Revision: 4247 http://bigdata.svn.sourceforge.net/bigdata/?rev=4247&view=rev Author: mrpersonick Date: 2011-02-24 23:10:42 +0000 (Thu, 24 Feb 2011) Log Message: ----------- changed how describe is optimized Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java 2011-02-24 20:33:29 UTC (rev 4246) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java 2011-02-24 23:10:42 UTC (rev 4247) @@ -10,6 +10,8 @@ import java.util.LinkedList; import java.util.Properties; import java.util.Set; + +import org.apache.log4j.Logger; import org.openrdf.model.Resource; import org.openrdf.model.Statement; import org.openrdf.model.URI; @@ -46,6 +48,8 @@ public class BigdataSailGraphQuery extends SailGraphQuery implements BigdataSailQuery { + protected static Logger log = Logger.getLogger(BigdataSailGraphQuery.class); + /** * Query hints are embedded in query strings as namespaces. * See {@link QueryHints#NAMESPACE} for more information. @@ -74,8 +78,10 @@ protected void optimizeDescribe() { try { - ParsedQuery parsedQuery = getParsedQuery(); + ParsedQuery parsedQuery = getParsedQuery(); TupleExpr node = parsedQuery.getTupleExpr(); + if (log.isInfoEnabled()) + log.info(node); node = ((Reduced) node).getArg(); node = ((Projection) node).getArg(); ValueExpr ve = ((Filter) node).getCondition(); @@ -90,7 +96,7 @@ vars.add(var); } }); - Collection<Join> joins = new LinkedList<Join>(); + Collection<StatementPattern> sps = new LinkedList<StatementPattern>(); Collection<ProjectionElemList> projElemLists = new LinkedList<ProjectionElemList>(); for (Var v : vars) { @@ -98,7 +104,7 @@ Var p = createAnonVar("-p" + v.getName() + "-1"); Var o = createAnonVar("-o" + v.getName()); StatementPattern sp = new StatementPattern(v, p, o); - joins.add(new Join(node, sp)); + sps.add(sp); ProjectionElemList projElemList = new ProjectionElemList(); projElemList.addElement(new ProjectionElem(v.getName(), "subject")); projElemList.addElement(new ProjectionElem(p.getName(), "predicate")); @@ -109,7 +115,7 @@ Var s = createAnonVar("-s" + v.getName()); Var p = createAnonVar("-p" + v.getName() + "-2"); StatementPattern sp = new StatementPattern(s, p, v); - joins.add(new Join(node, sp)); + sps.add(sp); ProjectionElemList projElemList = new ProjectionElemList(); projElemList.addElement(new ProjectionElem(s.getName(), "subject")); projElemList.addElement(new ProjectionElem(p.getName(), "predicate")); @@ -117,12 +123,12 @@ projElemLists.add(projElemList); } } - Iterator<Join> it = joins.iterator(); - node = it.next(); + Iterator<StatementPattern> it = sps.iterator(); + Union union = new Union(it.next(), it.next()); while (it.hasNext()) { - Join j = it.next(); - node = new Union(j, node); + union = new Union(union, it.next()); } + node = new Join(node, union); node = new MultiProjection(node, projElemLists); node = new Reduced(node); parsedQuery.setTupleExpr(node); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java =================================================================== --- 
branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java 2011-02-24 20:33:29 UTC (rev 4246) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java 2011-02-24 23:10:42 UTC (rev 4247) @@ -23,9 +23,9 @@ package com.bigdata.rdf.sail; -import java.util.Collection; -import java.util.LinkedList; import java.util.Properties; + +import org.apache.log4j.Logger; import org.openrdf.model.Literal; import org.openrdf.model.Statement; import org.openrdf.model.URI; @@ -33,16 +33,15 @@ import org.openrdf.model.impl.URIImpl; import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.Binding; import org.openrdf.query.BindingSet; -import org.openrdf.query.GraphQuery; import org.openrdf.query.GraphQueryResult; import org.openrdf.query.QueryLanguage; import org.openrdf.query.TupleQuery; import org.openrdf.query.TupleQueryResult; import org.openrdf.query.algebra.TupleExpr; -import org.openrdf.query.impl.BindingImpl; + import com.bigdata.rdf.axioms.NoAxioms; +import com.bigdata.rdf.store.BD; import com.bigdata.rdf.vocab.NoVocabulary; /** @@ -51,6 +50,8 @@ */ public class TestDescribe extends ProxyBigdataSailTestCase { + protected static Logger log = Logger.getLogger(TestDescribe.class); + @Override public Properties getProperties() { @@ -90,15 +91,21 @@ try { - URI mike = new URIImpl("_:Mike"); - URI person = new URIImpl("_:Person"); - URI likes = new URIImpl("_:likes"); - URI rdf = new URIImpl("_:RDF"); - Literal label = new LiteralImpl("Mike"); + URI mike = new URIImpl(BD.NAMESPACE+"Mike"); + URI bryan = new URIImpl(BD.NAMESPACE+"Bryan"); + URI person = new URIImpl(BD.NAMESPACE+"Person"); + URI likes = new URIImpl(BD.NAMESPACE+"likes"); + URI rdf = new URIImpl(BD.NAMESPACE+"RDF"); + URI rdfs = new URIImpl(BD.NAMESPACE+"RDFS"); + Literal label1 = new LiteralImpl("Mike"); + Literal label2 = new LiteralImpl("Bryan"); /**/ cxn.add(mike, RDF.TYPE, person); cxn.add(mike, likes, rdf); - cxn.add(mike, RDFS.LABEL, label); + cxn.add(mike, RDFS.LABEL, label1); + cxn.add(bryan, RDF.TYPE, person); + cxn.add(bryan, likes, rdfs); + cxn.add(bryan, RDFS.LABEL, label2); /**/ /* @@ -116,11 +123,21 @@ { String query = -// "describe ?x " + -// "WHERE { " + -// " ?x <"+RDF.TYPE+"> <"+person+"> . " + -// "}"; - "describe <"+mike+">"; + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + + "describe ?x " + + "WHERE { " + +// " { " + + " ?x rdf:type bd:Person . " + + " ?x bd:likes bd:RDF " + +// " } union { " + +// " ?x rdf:type bd:Person . " + +// " ?x bd:likes bd:RDFS " + +// " } " + + "}"; +// "describe <"+mike+">"; // "construct { " + // " <"+mike+"> ?p1 ?o . " + // " ?s ?p2 <"+mike+"> . " + @@ -158,25 +175,28 @@ GraphQueryResult result = graphQuery.evaluate(); final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - System.err.println(tupleExpr); + log.info(tupleExpr); while(result.hasNext()) { Statement s = result.next(); - System.err.println(s); + log.info(s); } } { String query = - "construct { " + - " ?x ?px1 ?ox . " + - " ?sx ?px2 ?x . " + + "construct { " + + " ?x ?p1 ?o . " + + " ?s ?p2 ?x . " + "} " + "WHERE { " + " ?x <"+RDF.TYPE+"> <"+person+"> . " + - " OPTIONAL { ?x ?px1 ?ox . } . " + - " OPTIONAL { ?sx ?px2 ?x . } . " + + " {" + + " ?x ?p1 ?ox . " + + " } UNION {" + + " ?sx ?p2 ?x . 
" + + " } " + "}"; /* @@ -204,11 +224,11 @@ GraphQueryResult result = graphQuery.evaluate(); final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - System.err.println(tupleExpr); + log.info(tupleExpr); while(result.hasNext()) { Statement s = result.next(); - System.err.println(s); + log.info(s); } } @@ -295,148 +315,15 @@ GraphQueryResult result = graphQuery.evaluate(); final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - System.err.println(tupleExpr); + log.info(tupleExpr); while(result.hasNext()) { Statement s = result.next(); - System.err.println(s); + log.info(s); } } - { - - String query = -// "construct {" + -// " ?x ?px1 ?ox . " + -// " ?sx ?px2 ?x . " + -// " ?y ?py1 ?oy . " + -// " ?sy ?py2 ?y . " + -// "} " + - "SELECT * " + - "WHERE { " + - " ?x <"+likes+"> ?y . " + - " OPTIONAL { ?x ?px1 ?ox . } . " + - " OPTIONAL { ?sx ?px2 ?x . } . " + - " OPTIONAL { ?y ?py1 ?oy . } . " + - " OPTIONAL { ?sy ?py2 ?y . } . " + - "}"; - -/* - construct { - ?s ?p ?o . - } - where { - ?x likes ?y . - ?s ?p ?o . - FILTER(?s == ?x || ?o == ?x || ?s == ?y || ?o == ?y) . - } - - construct { - ?x ?px1 ?ox . - ?sx ?px2 ?x . - ?y ?py1 ?oy . - ?sy ?py2 ?y . - } - where { - ?x likes ?y . - OPTIONAL { ?x ?px1 ?ox . } . - OPTIONAL { ?sx ?px2 ?x . } . - OPTIONAL { ?y ?py1 ?oy . } . - OPTIONAL { ?sy ?py2 ?y . } . - } -*/ -/* - final BigdataSailGraphQuery graphQuery = (BigdataSailGraphQuery) - cxn.prepareGraphQuery(QueryLanguage.SPARQL, query); - GraphQueryResult result = graphQuery.evaluate(); - - final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - System.err.println(tupleExpr); - - while(result.hasNext()) { - Statement s = result.next(); - System.err.println(s); - } -*/ - final TupleQuery tupleQuery = - cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - final TupleQueryResult result = tupleQuery.evaluate(); - - while(result.hasNext()) { - BindingSet bs = result.next(); - System.err.println(bs); - } - - } - { - - String query = -// "construct {" + -// " ?x ?px1 ?ox . " + -// " ?sx ?px2 ?x . " + -// " ?y ?py1 ?oy . " + -// " ?sy ?py2 ?y . " + -// "} " + - "SELECT * " + - "WHERE { " + - " { ?x <"+likes+"> ?y . ?x ?px1 ?ox . } " + - " UNION " + - " { ?x <"+likes+"> ?y . ?sx ?px2 ?x . } " + - " UNION " + - " { ?x <"+likes+"> ?y . ?y ?py1 ?oy . } " + - " UNION " + - " { ?x <"+likes+"> ?y . ?sy ?py2 ?y . } " + - "}"; - -/* - construct { - ?s ?p ?o . - } - where { - ?x likes ?y . - ?s ?p ?o . - FILTER(?s == ?x || ?o == ?x || ?s == ?y || ?o == ?y) . - } - - construct { - ?x ?px1 ?ox . - ?sx ?px2 ?x . - ?y ?py1 ?oy . - ?sy ?py2 ?y . - } - where { - ?x likes ?y . - OPTIONAL { ?x ?px1 ?ox . } . - OPTIONAL { ?sx ?px2 ?x . } . - OPTIONAL { ?y ?py1 ?oy . } . - OPTIONAL { ?sy ?py2 ?y . } . - } -*/ -/* - final BigdataSailGraphQuery graphQuery = (BigdataSailGraphQuery) - cxn.prepareGraphQuery(QueryLanguage.SPARQL, query); - GraphQueryResult result = graphQuery.evaluate(); - - final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - System.err.println(tupleExpr); - - while(result.hasNext()) { - Statement s = result.next(); - System.err.println(s); - } -*/ - final TupleQuery tupleQuery = - cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - final TupleQueryResult result = tupleQuery.evaluate(); - - while(result.hasNext()) { - BindingSet bs = result.next(); - System.err.println(bs); - } - - } - } finally { cxn.close(); sail.__tearDownUnitTest(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2011-03-29 12:13:47
Revision: 4344 http://bigdata.svn.sourceforge.net/bigdata/?rev=4344&view=rev Author: thompsonbry Date: 2011-03-29 12:13:40 +0000 (Tue, 29 Mar 2011) Log Message: ----------- I've refactored the rewrite logic out of the BigdataSailConnection and into its own class (BigdataValueReplacer) and prepared the skeleton of a test suite for the rewrite logic (TestBigdataValueReplacer) such that it is possible to write unit tests against the rewriter without running a query against the Sail. I've incorporated a unit test into TestBigdataValueReplacer which replicates the problem described in [1]. Before going further, I would like to clarify whether we should simply filter out bindings not used by the query or also reintroduce those bindings when translating the query solutions into openrdf binding sets. [1] https://sourceforge.net/apps/trac/bigdata/ticket/271 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestPruneBindingSets.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java 2011-03-28 20:15:45 UTC (rev 4343) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -3481,251 +3481,15 @@ * as a variable! * * @return yucky hack, need to return a new dataset and a new binding - * set. dataset is [0], binding set is [1] + * set. dataset is [0], binding set is [1] */ - protected Object[] replaceValues(Dataset dataset, - final TupleExpr tupleExpr, BindingSet bindings) + protected Object[] replaceValues(final Dataset dataset, + final TupleExpr tupleExpr, final BindingSet bindings) throws SailException { - /* - * Resolve the values used by this query. - * - * Note: If any value can not be resolved, then its term identifer - * will remain ZERO (0L) (aka NULL). Except within OPTIONALs, this - * indicates that the query CAN NOT be satisified by the data since - * one or more required terms are unknown to the database. - */ - final HashMap<Value, BigdataValue> values = new HashMap<Value, BigdataValue>(); - - final BigdataValueFactory valueFactory = database.getValueFactory(); - - if (dataset != null) { - - for(URI uri : dataset.getDefaultGraphs()) - values.put(uri, valueFactory.asValue(uri)); - - for(URI uri : dataset.getNamedGraphs()) - values.put(uri, valueFactory.asValue(uri)); - - } - - tupleExpr.visit(new QueryModelVisitorBase<SailException>() { - - @Override - public void meet(final Var var) { - - if (var.hasValue()) { - - final Value val = var.getValue(); - - // add BigdataValue variant of the var's Value. 
- values.put(val, valueFactory.asValue(val)); - - } - - } - - @Override - public void meet(final ValueConstant constant) { - - if (constant.getParentNode() instanceof LangMatches) { - /* Don't try to resolve for lang matches. - * - * Note: Sesame will sometimes use a Literal to represent - * a constant parameter to a function, such as LangMatches. - * For such uses, we DO NOT want to attempt to resolve the - * Literal against the lexicon. Instead, it should just be - * passed through. BigdataSailEvaluationStrategy is then - * responsible for recognizing cases where the lack of an - * IV on a constant is associated with such function calls - * rather than indicating that the Value is not known to - * the KB. - */ - return; - } - - final Value val = constant.getValue(); - - // add BigdataValue variant of the var's Value. - values.put(val, valueFactory.asValue(val)); - - } - - }); + return new BigdataValueReplacer(database).replaceValues(dataset, + tupleExpr, bindings); - if (bindings != null) { - - Iterator<Binding> it = bindings.iterator(); - - while (it.hasNext()) { - - final Binding binding = it.next(); - - final Value val = binding.getValue(); - - // add BigdataValue variant of the var's Value. - values.put(val, valueFactory.asValue(val)); - - } - - } - - /* - * Batch resolve term identifiers for those BigdataValues. - * - * Note: If any value does not exist in the lexicon then its term - * identifier will be ZERO (0L). - */ - { - - final BigdataValue[] terms = values.values().toArray( - new BigdataValue[] {}); - - database.getLexiconRelation().addTerms(terms, terms.length, - true/* readOnly */); - - } - - /* - * Replace the values with BigdataValues having their resolve term - * identifiers. - */ - tupleExpr.visit(new QueryModelVisitorBase<SailException>() { - - @Override - public void meet(Var var) { - - if (var.hasValue()) { - - // the Sesame Value object. - final Value val = var.getValue(); - - // Lookup the resolve BigdataValue object. - final BigdataValue val2 = values.get(val); - - assert val2 != null : "value not found: "+var.getValue(); - - if (log.isDebugEnabled()) - log.debug("value: " + val + " : " + val2 + " (" - + val2.getIV() + ")"); - - if (val2.getIV() == null) { - - /* - * Since the term identifier is NULL this value is - * not known to the kb. - */ - - if(log.isInfoEnabled()) - log.info("Not in knowledge base: " + val2); - - } - - // replace the constant in the query. - var.setValue(val2); - - } - } - - @Override - public void meet(ValueConstant constant) { - - if (constant.getParentNode() instanceof LangMatches) { - /* Note: This is parallel to the meet in the visit - * pattern above. - */ - return; - } - - // the Sesame Value object. - final Value val = constant.getValue(); - - // Lookup the resolve BigdataValue object. - final BigdataValue val2 = values.get(val); - - assert val2 != null : "value not found: "+constant.getValue(); - - if (log.isDebugEnabled()) - log.debug("value: " + val + " : " + val2 + " (" - + val2.getIV() + ")"); - - if (val2.getIV() == null) { - - /* - * Since the term identifier is NULL this value is - * not known to the kb. - */ - - if(log.isInfoEnabled()) - log.info("Not in knowledge base: " + val2); - - } - - // replace the constant in the query. 
- constant.setValue(val2); - - } - - }); - - if (bindings != null) { - - MapBindingSet bindings2 = new MapBindingSet(); - - Iterator<Binding> it = bindings.iterator(); - - while (it.hasNext()) { - - final BindingImpl binding = (BindingImpl) it.next(); - - final Value val = binding.getValue(); - - // Lookup the resolve BigdataValue object. - final BigdataValue val2 = values.get(val); - - assert val2 != null : "value not found: "+binding.getValue(); - - if (log.isDebugEnabled()) - log.debug("value: " + val + " : " + val2 + " (" - + val2.getIV() + ")"); - - if (val2.getIV() == null) { - - /* - * Since the term identifier is NULL this value is - * not known to the kb. - */ - - if(log.isInfoEnabled()) - log.info("Not in knowledge base: " + val2); - - } - - // replace the constant in the query. - bindings2.addBinding(binding.getName(), val2); - - } - - bindings = bindings2; - - } - - if (dataset != null) { - - final DatasetImpl dataset2 = new DatasetImpl(); - - for(URI uri : dataset.getDefaultGraphs()) - dataset2.addDefaultGraph((URI)values.get(uri)); - - for(URI uri : dataset.getNamedGraphs()) - dataset2.addNamedGraph((URI)values.get(uri)); - - dataset = dataset2; - - } - - return new Object[] { dataset, bindings }; - } /** Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -0,0 +1,338 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Mar 29, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.HashMap; +import java.util.Iterator; + +import org.apache.log4j.Logger; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.query.Binding; +import org.openrdf.query.BindingSet; +import org.openrdf.query.Dataset; +import org.openrdf.query.algebra.LangMatches; +import org.openrdf.query.algebra.StatementPattern; +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.algebra.ValueConstant; +import org.openrdf.query.algebra.Var; +import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; +import org.openrdf.query.impl.BindingImpl; +import org.openrdf.query.impl.DatasetImpl; +import org.openrdf.query.impl.MapBindingSet; +import org.openrdf.sail.SailException; + +import com.bigdata.rdf.model.BigdataValue; +import com.bigdata.rdf.model.BigdataValueFactory; +import com.bigdata.rdf.store.AbstractTripleStore; + +/** + * Utility class to manage the efficient translation of openrdf {@link Value}s + * in a {@link TupleExpr} or {@link BindingSet} into the {@link BigdataValue}s + * used internally by bigdata. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class BigdataValueReplacer { + + private final static Logger log = Logger + .getLogger(BigdataValueReplacer.class); + + private final AbstractTripleStore database; + + public BigdataValueReplacer(final AbstractTripleStore database) { + + if(database == null) + throw new IllegalArgumentException(); + + this.database = database; + + } + + /** + * Batch resolve and replace all {@link Value} objects stored in variables + * or in the {@link Dataset} with {@link BigdataValue} objects, which have + * access to the 64-bit internal term identifier associated with each value + * in the database. + * <p> + * Note: The native rule execution must examine the resulting + * {@link BigdataValue}s. If any value does not exist in the lexicon then + * its term identifier will be ZERO (0L). {@link StatementPattern}s with + * term identifiers of ZERO (0L) WILL NOT match anything in the data and + * MUST NOT be executed since a ZERO (0L) will be interpreted as a variable! + * + * @return yucky hack, need to return a new dataset and a new binding set. + * dataset is [0], binding set is [1] + */ + public Object[] replaceValues(Dataset dataset, + final TupleExpr tupleExpr, BindingSet bindings) + throws SailException { + + /* + * Resolve the values used by this query. + * + * Note: If any value can not be resolved, then its term identifer + * will remain ZERO (0L) (aka NULL). Except within OPTIONALs, this + * indicates that the query CAN NOT be satisified by the data since + * one or more required terms are unknown to the database. 
+ */ + final HashMap<Value, BigdataValue> values = new HashMap<Value, BigdataValue>(); + + final BigdataValueFactory valueFactory = database.getValueFactory(); + + if (dataset != null) { + + for(URI uri : dataset.getDefaultGraphs()) + values.put(uri, valueFactory.asValue(uri)); + + for(URI uri : dataset.getNamedGraphs()) + values.put(uri, valueFactory.asValue(uri)); + + } + + tupleExpr.visit(new QueryModelVisitorBase<SailException>() { + + @Override + public void meet(final Var var) { + + if (var.hasValue()) { + + final Value val = var.getValue(); + + // add BigdataValue variant of the var's Value. + values.put(val, valueFactory.asValue(val)); + + } + + } + + @Override + public void meet(final ValueConstant constant) { + + if (constant.getParentNode() instanceof LangMatches) { + /* Don't try to resolve for lang matches. + * + * Note: Sesame will sometimes use a Literal to represent + * a constant parameter to a function, such as LangMatches. + * For such uses, we DO NOT want to attempt to resolve the + * Literal against the lexicon. Instead, it should just be + * passed through. BigdataSailEvaluationStrategy is then + * responsible for recognizing cases where the lack of an + * IV on a constant is associated with such function calls + * rather than indicating that the Value is not known to + * the KB. + */ + return; + } + + final Value val = constant.getValue(); + + // add BigdataValue variant of the var's Value. + values.put(val, valueFactory.asValue(val)); + + } + + }); + + if (bindings != null) { + + Iterator<Binding> it = bindings.iterator(); + + while (it.hasNext()) { + + final Binding binding = it.next(); + + final Value val = binding.getValue(); + + // add BigdataValue variant of the var's Value. + values.put(val, valueFactory.asValue(val)); + + } + + } + + /* + * Batch resolve term identifiers for those BigdataValues. + * + * Note: If any value does not exist in the lexicon then its term + * identifier will be ZERO (0L). + */ + { + + final BigdataValue[] terms = values.values().toArray( + new BigdataValue[] {}); + + database.getLexiconRelation().addTerms(terms, terms.length, + true/* readOnly */); + + } + + /* + * Replace the values with BigdataValues having their resolve term + * identifiers. + */ + tupleExpr.visit(new QueryModelVisitorBase<SailException>() { + + @Override + public void meet(Var var) { + + if (var.hasValue()) { + + // the Sesame Value object. + final Value val = var.getValue(); + + // Lookup the resolve BigdataValue object. + final BigdataValue val2 = values.get(val); + + assert val2 != null : "value not found: "+var.getValue(); + + if (log.isDebugEnabled()) + log.debug("value: " + val + " : " + val2 + " (" + + val2.getIV() + ")"); + + if (val2.getIV() == null) { + + /* + * Since the term identifier is NULL this value is + * not known to the kb. + */ + + if(log.isInfoEnabled()) + log.info("Not in knowledge base: " + val2); + + } + + // replace the constant in the query. + var.setValue(val2); + + } + } + + @Override + public void meet(ValueConstant constant) { + + if (constant.getParentNode() instanceof LangMatches) { + /* Note: This is parallel to the meet in the visit + * pattern above. + */ + return; + } + + // the Sesame Value object. + final Value val = constant.getValue(); + + // Lookup the resolve BigdataValue object. 
+ final BigdataValue val2 = values.get(val); + + assert val2 != null : "value not found: "+constant.getValue(); + + if (log.isDebugEnabled()) + log.debug("value: " + val + " : " + val2 + " (" + + val2.getIV() + ")"); + + if (val2.getIV() == null) { + + /* + * Since the term identifier is NULL this value is + * not known to the kb. + */ + + if(log.isInfoEnabled()) + log.info("Not in knowledge base: " + val2); + + } + + // replace the constant in the query. + constant.setValue(val2); + + } + + }); + + if (bindings != null) { + + MapBindingSet bindings2 = new MapBindingSet(); + + Iterator<Binding> it = bindings.iterator(); + + while (it.hasNext()) { + + final BindingImpl binding = (BindingImpl) it.next(); + + final Value val = binding.getValue(); + +// Lookup the resolve BigdataValue object. + final BigdataValue val2 = values.get(val); + + assert val2 != null : "value not found: "+binding.getValue(); + + if (log.isDebugEnabled()) + log.debug("value: " + val + " : " + val2 + " (" + + val2.getIV() + ")"); + + if (val2.getIV() == null) { + + /* + * Since the term identifier is NULL this value is + * not known to the kb. + */ + + if(log.isInfoEnabled()) + log.info("Not in knowledge base: " + val2); + + } + + // replace the constant in the query. + bindings2.addBinding(binding.getName(), val2); + + } + + bindings = bindings2; + + } + + if (dataset != null) { + + final DatasetImpl dataset2 = new DatasetImpl(); + + for(URI uri : dataset.getDefaultGraphs()) + dataset2.addDefaultGraph((URI)values.get(uri)); + + for(URI uri : dataset.getNamedGraphs()) + dataset2.addNamedGraph((URI)values.get(uri)); + + dataset = dataset2; + + } + + return new Object[] { dataset, bindings }; + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java 2011-03-28 20:15:45 UTC (rev 4343) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -85,6 +85,9 @@ final ProxyTestSuite suite = new ProxyTestSuite(delegate, "SAIL with Quads (embedded federation)"); + // test rewrite of RDF Value => BigdataValue for binding set and tuple expr. + suite.addTestSuite(TestBigdataValueReplacer.class); + // test pruning of variables not required for downstream processing. suite.addTestSuite(TestPruneBindingSets.class); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java 2011-03-28 20:15:45 UTC (rev 4343) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -67,6 +67,9 @@ final ProxyTestSuite suite = new ProxyTestSuite(delegate, "SAIL with Quads (pipeline joins)"); + // test rewrite of RDF Value => BigdataValue for binding set and tuple expr. 
+ suite.addTestSuite(TestBigdataValueReplacer.class); + // test pruning of variables not required for downstream processing. suite.addTestSuite(TestPruneBindingSets.class); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java 2011-03-28 20:15:45 UTC (rev 4343) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -61,6 +61,9 @@ final ProxyTestSuite suite = new ProxyTestSuite(delegate, "SAIL with Triples (with SIDs)"); + // test rewrite of RDF Value => BigdataValue for binding set and tuple expr. + suite.addTestSuite(TestBigdataValueReplacer.class); + // test pruning of variables not required for downstream processing. suite.addTestSuite(TestPruneBindingSets.class); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java 2011-03-28 20:15:45 UTC (rev 4343) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -61,6 +61,9 @@ final ProxyTestSuite suite = new ProxyTestSuite(delegate, "SAIL with Triples (no SIDs)"); + // test rewrite of RDF Value => BigdataValue for binding set and tuple expr. + suite.addTestSuite(TestBigdataValueReplacer.class); + // test pruning of variables not required for downstream processing. suite.addTestSuite(TestPruneBindingSets.class); Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -0,0 +1,158 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Mar 29, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.Properties; + +import org.openrdf.model.Value; +import org.openrdf.model.impl.LiteralImpl; +import org.openrdf.model.impl.URIImpl; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryEvaluationException; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQuery; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.repository.RepositoryException; +import org.openrdf.sail.SailException; + +import com.bigdata.rdf.axioms.NoAxioms; +import com.bigdata.rdf.model.BigdataValue; +import com.bigdata.rdf.vocab.NoVocabulary; + +/** + * Test suite for the logic which rewrites a query, replacing {@link Value} + * constants with {@link BigdataValue} constants which have been resolved + * against the database. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class TestBigdataValueReplacer extends ProxyBigdataSailTestCase { + + /** + * + */ + public TestBigdataValueReplacer() { + } + + /** + * @param name + */ + public TestBigdataValueReplacer(String name) { + super(name); + } + + @Override + public Properties getProperties() { + + Properties props = super.getProperties(); + + props.setProperty(BigdataSail.Options.TRUTH_MAINTENANCE, "false"); + props.setProperty(BigdataSail.Options.AXIOMS_CLASS, NoAxioms.class.getName()); + props.setProperty(BigdataSail.Options.VOCABULARY_CLASS, NoVocabulary.class.getName()); + props.setProperty(BigdataSail.Options.JUSTIFY, "false"); + props.setProperty(BigdataSail.Options.TEXT_INDEX, "false"); + + return props; + + } + + /** + * Unit test for bindings passed into a query which are not used by the + * query. + * + * @throws RepositoryException + * @throws SailException + * @throws MalformedQueryException + * @throws QueryEvaluationException + * + * @see https://sourceforge.net/apps/trac/bigdata/ticket/271 + */ + public void test_bug() throws RepositoryException, SailException, + MalformedQueryException, QueryEvaluationException { + + final BigdataSail sail = getSail(); + + try { + + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + final BigdataSailRepositoryConnection cxn = (BigdataSailRepositoryConnection) repo + .getConnection(); + try { + + cxn.setAutoCommit(false); + + /* + * Add a statement so the query does not get short circuited + * because some of the terms in the query are undefined in the + * database. + */ + cxn.add(new URIImpl("s:1"), new URIImpl("p:1"), new URIImpl( + "s:2")); + + final String query = "select ?a ?b WHERE {?a <p:1> ?b}"; + + final TupleQuery q = cxn.prepareTupleQuery( + QueryLanguage.SPARQL, query); + + /* + * Setup some bindings. + */ + // bind to a term in the database. + q.setBinding("a", new URIImpl("s:2")); + // bind to a term in the database. + q.setBinding("b", new URIImpl("s:2")); + // bind to a term NOT found in the database. + q.setBinding("notused", new LiteralImpl("lit")); + + /* + * Evaluate the query. + */ + final TupleQueryResult result = q.evaluate(); + try { + // @todo verify that the binding was passed along unchanged. 
+ } finally { + result.close(); + } + + } finally { + + cxn.close(); + + } + + } finally { + + sail.__tearDownUnitTest(); + + } + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestPruneBindingSets.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestPruneBindingSets.java 2011-03-28 20:15:45 UTC (rev 4343) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestPruneBindingSets.java 2011-03-29 12:13:40 UTC (rev 4344) @@ -82,14 +82,16 @@ public void testPruneBindingSets() throws Exception { final BigdataSail sail = getSail(); - sail.initialize(); - final BigdataSailRepository repo = new BigdataSailRepository(sail); - final BigdataSailRepositoryConnection cxn = - (BigdataSailRepositoryConnection) repo.getConnection(); - cxn.setAutoCommit(false); try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + final BigdataSailRepositoryConnection cxn = + (BigdataSailRepositoryConnection) repo.getConnection(); + try { + cxn.setAutoCommit(false); + URI x = new URIImpl("_:X"); URI a = new URIImpl("_:A"); URI b = new URIImpl("_:B"); @@ -139,9 +141,12 @@ compare(result, solution); } + } finally { + + cxn.close(); + } } finally { - cxn.close(); sail.__tearDownUnitTest(); }
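For orientation, a minimal sketch of how the new BigdataValueReplacer is meant to be called; the database, dataset, tupleExpr and bindings variables here are assumed context for illustration, not code from this commit:

    // Batch-resolve the openrdf Values used by the query into BigdataValues
    // which carry the internal term identifiers (IVs) from the lexicon.
    final BigdataValueReplacer replacer = new BigdataValueReplacer(database);

    // Per the javadoc: the rewritten dataset is returned at [0] and the
    // rewritten binding set at [1].
    final Object[] out = replacer.replaceValues(dataset, tupleExpr, bindings);
    final Dataset dataset2 = (Dataset) out[0];
    final BindingSet bindings2 = (BindingSet) out[1];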
From: <tho...@us...> - 2011-03-29 12:40:00
Revision: 4345 http://bigdata.svn.sourceforge.net/bigdata/?rev=4345&view=rev Author: thompsonbry Date: 2011-03-29 12:39:54 +0000 (Tue, 29 Mar 2011) Log Message: ----------- I've modified the BigdataValueReplacer to track the variables used in the query and drop bindings whose variable name does not appear in the query. The unit test has been updated to verify the expected solution and now passes. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java 2011-03-29 12:13:40 UTC (rev 4344) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java 2011-03-29 12:39:54 UTC (rev 4345) @@ -29,6 +29,8 @@ import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; import org.apache.log4j.Logger; import org.openrdf.model.URI; @@ -97,12 +99,17 @@ /* * Resolve the values used by this query. * - * Note: If any value can not be resolved, then its term identifer + * Note: If any value can not be resolved, then its term identifier * will remain ZERO (0L) (aka NULL). Except within OPTIONALs, this - * indicates that the query CAN NOT be satisified by the data since + * indicates that the query CAN NOT be satisfied by the data since * one or more required terms are unknown to the database. */ final HashMap<Value, BigdataValue> values = new HashMap<Value, BigdataValue>(); + + /* + * The set of variables encountered in the query. + */ + final Map<String/* name */, Var> vars = new LinkedHashMap<String, Var>(); final BigdataValueFactory valueFactory = database.getValueFactory(); @@ -121,6 +128,8 @@ @Override public void meet(final Var var) { + vars.put(var.getName(), var); + if (var.hasValue()) { final Value val = var.getValue(); @@ -162,7 +171,7 @@ if (bindings != null) { - Iterator<Binding> it = bindings.iterator(); + final Iterator<Binding> it = bindings.iterator(); while (it.hasNext()) { @@ -247,7 +256,7 @@ // the Sesame Value object. final Value val = constant.getValue(); - // Lookup the resolve BigdataValue object. + // Lookup the resolved BigdataValue object. final BigdataValue val2 = values.get(val); assert val2 != null : "value not found: "+constant.getValue(); @@ -277,17 +286,28 @@ if (bindings != null) { - MapBindingSet bindings2 = new MapBindingSet(); + final MapBindingSet bindings2 = new MapBindingSet(); - Iterator<Binding> it = bindings.iterator(); + final Iterator<Binding> it = bindings.iterator(); while (it.hasNext()) { final BindingImpl binding = (BindingImpl) it.next(); + + if (!vars.containsKey(binding.getName())) { + + // Drop bindings which are not used within the query. + + if (log.isInfoEnabled()) + log.info("Dropping unused binding: var=" + binding); + + continue; + + } final Value val = binding.getValue(); -// Lookup the resolve BigdataValue object. + // Lookup the resolved BigdataValue object.
final BigdataValue val2 = values.get(val); assert val2 != null : "value not found: "+binding.getValue(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java 2011-03-29 12:13:40 UTC (rev 4344) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataValueReplacer.java 2011-03-29 12:39:54 UTC (rev 4345) @@ -27,16 +27,22 @@ package com.bigdata.rdf.sail; +import java.util.Collection; +import java.util.LinkedList; import java.util.Properties; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; import org.openrdf.model.Value; import org.openrdf.model.impl.LiteralImpl; import org.openrdf.model.impl.URIImpl; +import org.openrdf.query.BindingSet; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryEvaluationException; import org.openrdf.query.QueryLanguage; import org.openrdf.query.TupleQuery; import org.openrdf.query.TupleQueryResult; +import org.openrdf.query.impl.MapBindingSet; import org.openrdf.repository.RepositoryException; import org.openrdf.sail.SailException; @@ -70,8 +76,10 @@ @Override public Properties getProperties() { - Properties props = super.getProperties(); +// Logger.getLogger(BigdataValueReplacer.class).setLevel(Level.ALL); + final Properties props = super.getProperties(); + props.setProperty(BigdataSail.Options.TRUTH_MAINTENANCE, "false"); props.setProperty(BigdataSail.Options.AXIOMS_CLASS, NoAxioms.class.getName()); props.setProperty(BigdataSail.Options.VOCABULARY_CLASS, NoVocabulary.class.getName()); @@ -93,8 +101,8 @@ * * @see https://sourceforge.net/apps/trac/bigdata/ticket/271 */ - public void test_bug() throws RepositoryException, SailException, - MalformedQueryException, QueryEvaluationException { + public void test_dropUnusedBindings() throws RepositoryException, + SailException, MalformedQueryException, QueryEvaluationException { final BigdataSail sail = getSail(); @@ -125,18 +133,26 @@ * Setup some bindings. */ // bind to a term in the database. - q.setBinding("a", new URIImpl("s:2")); + q.setBinding("a", new URIImpl("s:1")); // bind to a term in the database. q.setBinding("b", new URIImpl("s:2")); // bind to a term NOT found in the database. q.setBinding("notused", new LiteralImpl("lit")); /* - * Evaluate the query. + * Evaluate the query and verify that the correct solution + * is produced. */ + final Collection<BindingSet> expected = new LinkedList<BindingSet>(); + { + final MapBindingSet bset = new MapBindingSet(); + bset.addBinding("a", new URIImpl("s:1")); + bset.addBinding("b", new URIImpl("s:2")); + expected.add(bset); + } final TupleQueryResult result = q.evaluate(); try { - // @todo verify that the binding was passed along unchanged. + compare(result, expected); } finally { result.close(); }
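The visible effect of the fix, sketched with the same Sesame calls the updated test uses; cxn is assumed to be an open BigdataSailRepositoryConnection over a KB containing the statement <s:1> <p:1> <s:2>:

    final TupleQuery q = cxn.prepareTupleQuery(QueryLanguage.SPARQL,
            "select ?a ?b WHERE {?a <p:1> ?b}");
    q.setBinding("a", new URIImpl("s:1"));
    q.setBinding("b", new URIImpl("s:2"));
    // "notused" does not occur in the query; the replacer now drops it
    // instead of failing on a value which is unknown to the lexicon.
    q.setBinding("notused", new LiteralImpl("lit"));
    final TupleQueryResult result = q.evaluate(); // one solution: a=s:1, b=s:2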
From: <mrp...@us...> - 2011-04-06 21:27:51
Revision: 4375 http://bigdata.svn.sourceforge.net/bigdata/?rev=4375&view=rev Author: mrpersonick Date: 2011-04-06 21:27:45 +0000 (Wed, 06 Apr 2011) Log Message: ----------- renamed some QueryHints constants Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailBooleanQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailTupleQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHints.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryOptimizerEnum.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -313,7 +313,7 @@ * This is the top-level method called by the SAIL to evaluate a query. * The TupleExpr parameter here is guaranteed to be the root of the operator * tree for the query. Query hints are parsed by the SAIL from the - * namespaces in the original query. See {@link QueryHints#NAMESPACE}. + * namespaces in the original query. See {@link QueryHints#PREFIX}. * <p> * The query root will be handled by the native Sesame evaluation until we * reach one of three possible top-level operators (union, join, or left Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -3378,7 +3378,7 @@ * {@link Options#QUERY_TIME_EXPANDER}, but not on a per-query basis. * <p> * QueryHints are a set of properties that are parsed from a SPARQL - * query. See {@link QueryHints#NAMESPACE} for more information. + * query. See {@link QueryHints#PREFIX} for more information. 
* * @todo The [bindings] are supposed to be inputs to the query * evaluation, but I am still not quite clear what the role of the Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailBooleanQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailBooleanQuery.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailBooleanQuery.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -20,7 +20,7 @@ /** * Query hints are embedded in query strings as namespaces. - * See {@link QueryHints#NAMESPACE} for more information. + * See {@link QueryHints#PREFIX} for more information. */ private final Properties queryHints; @@ -42,7 +42,7 @@ /** * Overriden to use query hints from SPARQL queries. Query hints are * embedded in query strings as namespaces. - * See {@link QueryHints#NAMESPACE} for more information. + * See {@link QueryHints#PREFIX} for more information. */ @Override public boolean evaluate() throws QueryEvaluationException { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -53,7 +53,7 @@ /** * Query hints are embedded in query strings as namespaces. - * See {@link QueryHints#NAMESPACE} for more information. + * See {@link QueryHints#PREFIX} for more information. */ private final Properties queryHints; @@ -234,7 +234,7 @@ /** * Overriden to use query hints from SPARQL queries. Query hints are * embedded in query strings as namespaces. - * See {@link QueryHints#NAMESPACE} for more information. + * See {@link QueryHints#PREFIX} for more information. */ @Override public GraphQueryResult evaluate() throws QueryEvaluationException { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -43,7 +43,7 @@ /** * Return query hints associated with this query. Query hints are embedded - * in query strings as namespaces. See {@link QueryHints#NAMESPACE} for more + * in query strings as namespaces. See {@link QueryHints#PREFIX} for more * information. */ Properties getQueryHints(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -67,7 +67,7 @@ * {@inheritDoc} * <p> * Overridden to capture query hints from SPARQL queries. Query hints are - * embedded in query strings as namespaces. 
See {@link QueryHints#NAMESPACE} + * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} * for more information. */ @Override @@ -92,7 +92,7 @@ * {@inheritDoc} * <p> * Overridden to capture query hints from SPARQL queries. Query hints are - * embedded in query strings as namespaces. See {@link QueryHints#NAMESPACE} + * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} * for more information. */ @Override @@ -113,7 +113,7 @@ * {@inheritDoc} * <p> * Overridden to capture query hints from SPARQL queries. Query hints are - * embedded in query strings as namespaces. See {@link QueryHints#NAMESPACE} + * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} * for more information. */ @Override @@ -134,7 +134,7 @@ * {@inheritDoc} * <p> * Overridden to capture query hints from SPARQL queries. Query hints are - * embedded in query strings as namespaces. See {@link QueryHints#NAMESPACE} + * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} * for more information. */ @Override @@ -319,7 +319,7 @@ /** * Parse query hints from a query string. Query hints are embedded in the * query string via special namespaces. - * See {@link QueryHints#NAMESPACE} for more information. + * See {@link QueryHints#PREFIX} for more information. */ private Properties parseQueryHints(final QueryLanguage ql, final String queryString, final String baseURI) @@ -339,7 +339,7 @@ for (Map.Entry<String, String> prefix : prefixes.entrySet()) { // if we see one that matches the magic namespace, try // to parse it - if (prefix.getKey().equalsIgnoreCase(QueryHints.NAMESPACE)) { + if (prefix.getKey().equalsIgnoreCase(QueryHints.PREFIX)) { String hints = prefix.getValue(); // has to have a # and it can't be at the end int i = hints.indexOf('#'); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailTupleQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailTupleQuery.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailTupleQuery.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -22,7 +22,7 @@ /** * Query hints are embedded in query strings as namespaces. - * See {@link QueryHints#NAMESPACE} for more information. + * See {@link QueryHints#PREFIX} for more information. */ private final Properties queryHints; @@ -45,7 +45,7 @@ *{@inheritDoc} * <p> * Overridden to use query hints from SPARQL queries. Query hints are - * embedded in query strings as namespaces. See {@link QueryHints#NAMESPACE} + * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} * for more information. */ @Override Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHints.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHints.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHints.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -28,6 +28,7 @@ package com.bigdata.rdf.sail; import com.bigdata.bop.BOp; +import com.bigdata.bop.controller.SubqueryHashJoinOp; /** * Query hint directives understood by a bigdata SPARQL end point. @@ -54,9 +55,9 @@ * {@link BOp.Annotations}. A list of the known directives is declared by * this interface. 
*/ - String NAMESPACE = "BIGDATA_QUERY_HINTS"; + String PREFIX = "BIGDATA_QUERY_HINTS"; - String PREFIX = "http://www.bigdata.com/queryHints#"; + String NAMESPACE = "http://www.bigdata.com/queryHints#"; /** * Specify the query optimizer. For example, you can disable the query @@ -95,5 +96,24 @@ * @see #TAG */ String DEFAULT_TAG = ""; + + + /** + * If true, this query hint will let the evaluation strategy know it should + * try to use the {@link SubqueryHashJoinOp} to perform a hash join between + * subqueries. Subqueries are identified in several ways: either an optional + * join group, or a set of tails within one join group that create a cross + * product if run normally (i.e. multiple free text searches). + * + * <pre> + * PREFIX BIGDATA_QUERY_HINTS: <http://www.bigdata.com/queryHints#com.bigdata.rdf.sail.QueryHints.hashJoin=true> + * </pre> + */ + String HASH_JOIN = QueryHints.class.getName() + ".hashJoin"; + /** + * @see #HASH_JOIN + */ + String DEFAULT_HASH_JOIN = "false"; + } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryOptimizerEnum.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryOptimizerEnum.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryOptimizerEnum.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -68,6 +68,6 @@ Runtime; public static String queryHint(final QueryOptimizerEnum val) { - return "prefix "+QueryHints.NAMESPACE+": <"+QueryHints.PREFIX+QueryHints.OPTIMIZER+"="+val+"> "; + return "prefix "+QueryHints.PREFIX+": <"+QueryHints.NAMESPACE+QueryHints.OPTIMIZER+"="+val+"> "; } } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java 2011-04-06 20:03:35 UTC (rev 4374) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java 2011-04-06 21:27:45 UTC (rev 4375) @@ -101,7 +101,7 @@ { - final String query = "PREFIX " + QueryHints.NAMESPACE + final String query = "PREFIX " + QueryHints.PREFIX + ": " + "<http://www.bigdata.com/queryOption#" + // PipelineOp.Annotations.MAX_PARALLEL + "=-5" // + "&" + "com.bigdata.fullScanTreshold=1000" //
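Concretely, after the rename PREFIX holds the prefix label and NAMESPACE holds the URI, so a hint is assembled the way the updated QueryOptimizerEnum.queryHint() does it. A sketch for the new HASH_JOIN hint; the SELECT body below is an arbitrary placeholder:

    // Expands to: prefix BIGDATA_QUERY_HINTS:
    // <http://www.bigdata.com/queryHints#com.bigdata.rdf.sail.QueryHints.hashJoin=true>
    final String hint = "prefix " + QueryHints.PREFIX + ": <"
            + QueryHints.NAMESPACE + QueryHints.HASH_JOIN + "=true> ";

    final String query = hint + "SELECT ?s WHERE { ?s ?p ?o }";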
From: <tho...@us...> - 2011-04-08 18:52:29
Revision: 4382 http://bigdata.svn.sourceforge.net/bigdata/?rev=4382&view=rev Author: thompsonbry Date: 2011-04-08 18:52:23 +0000 (Fri, 08 Apr 2011) Log Message: ----------- - The feature to control the size of the thread pool used to *run* the queries was dropped. This needs to be put back in. I've made comments in QueryServlet for this. It is used to control the query parallelism, which is different from the http connection parallelism. - I've taken out "-stop". It's just not necessary. - I've taken out the 'direct' API stuff. It was committed with that enabled and this is breaking our published API. - The status page needed to specify the mime type as text/html. - Perhaps the single best thing that we could add right now is a page with a FORM for typing in SPARQL queries so you can run them against the REST API. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/JettySparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestJettySparqlServer_StartStop.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataContext.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataContext.java 2011-04-08 13:02:36 UTC (rev 4381) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataContext.java 2011-04-08 18:52:23 UTC (rev 4382) @@ -211,11 +211,11 @@ */ public int queryThreadPoolSize = 8; - /** - * The capacity of the buffers for the pipe connecting the running query - * to the HTTP response. - */ - public int bufferCapacity = Bytes.kilobyte32 * 1; +// /** +// * The capacity of the buffers for the pipe connecting the running query +// * to the HTTP response. +// */ +// public int bufferCapacity = Bytes.kilobyte32 * 1; public String resourceBase = "."; Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/JettySparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/JettySparqlServer.java 2011-04-08 13:02:36 UTC (rev 4381) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/JettySparqlServer.java 2011-04-08 18:52:23 UTC (rev 4382) @@ -38,7 +38,11 @@ static private final Logger log = Logger.getLogger(JettySparqlServer.class); - protected static final boolean directServletAccess = true; + /* + * Martyn, I took this out of the code because it was getting committed with + * the wrong value to run the published API! 
Bryan + */ +// protected static final boolean directServletAccess = false; int m_port = -1; // allow package visibility from JettySparqlCommand @@ -117,34 +121,35 @@ // embedded setup m_handlerMap.put("/status", new ServletHandler(new StatusServlet())); - if (directServletAccess) { - m_handlerMap.put("/query", new ServletHandler(new QueryServlet())); - m_handlerMap.put("/update", new ServletHandler(new UpdateServlet())); - m_handlerMap.put("/delete", new ServletHandler(new DeleteServlet())); - } else { +// if (directServletAccess) { +// m_handlerMap.put("/query", new ServletHandler(new QueryServlet())); +// m_handlerMap.put("/update", new ServletHandler(new UpdateServlet())); +// m_handlerMap.put("/delete", new ServletHandler(new DeleteServlet())); +// } else + { // create implementation servlets new QueryServlet(); new UpdateServlet(); - // still need delete endpoint for delete with body + // still need delete endpoint for delete with body m_handlerMap.put("/delete", new ServletHandler(new DeleteServlet())); } m_handlerMap.put("/", new ServletHandler(new RESTServlet())); - // the "stop" handler is only relevant for the embedded server - m_handlerMap.put("/stop", new AbstractHandler() { - public void handle(String arg0, Request arg1, HttpServletRequest arg2, HttpServletResponse resp) - throws IOException, ServletException { - try { - resp.getWriter().println("Server Stop request received"); - shutdownNow(); - } catch (InterruptedException e) { - // okay - } catch (Exception e) { - e.printStackTrace(); - } - } - }); +// // the "stop" handler is only relevant for the embedded server +// m_handlerMap.put("/stop", new AbstractHandler() { +// public void handle(String arg0, Request arg1, HttpServletRequest arg2, HttpServletResponse resp) +// throws IOException, ServletException { +// try { +// resp.getWriter().println("Server Stop request received"); +// shutdownNow(); +// } catch (InterruptedException e) { +// // okay +// } catch (Exception e) { +// e.printStackTrace(); +// } +// } +// }); final HandlerList handlers = new HandlerList(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java 2011-04-08 13:02:36 UTC (rev 4381) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java 2011-04-08 18:52:23 UTC (rev 4382) @@ -12,22 +12,28 @@ import org.apache.log4j.Logger; -import com.bigdata.btree.BytesUtil; import com.bigdata.journal.IIndexManager; import com.bigdata.journal.ITransactionService; import com.bigdata.journal.ITx; import com.bigdata.journal.Journal; import com.bigdata.journal.TimestampUtility; -import com.bigdata.rawstore.Bytes; import com.bigdata.rdf.sail.BigdataSail; import com.bigdata.rdf.sail.webapp.BigdataContext.Config; import com.bigdata.service.AbstractDistributedFederation; import com.bigdata.service.IBigdataFederation; import com.bigdata.service.jini.JiniClient; +/** + * Utility class provides a simple SPARQL end point with a REST API. 
+ * + * @author thompsonbry + * @author martyncutcher + * + * @see https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer + */ public class NanoSparqlServer { - static private final Logger log = Logger.getLogger(JettySparqlServer.class); + static private final Logger log = Logger.getLogger(NanoSparqlServer.class); /** * Run an httpd service exposing a SPARQL endpoint. The service will respond @@ -46,8 +52,6 @@ * * @param args * USAGE:<br/> - * To stop the server:<br/> - * <code>port -stop</code><br/> * To start the server:<br/> * <code>(options) <i>namespace</i> (propertyFile|configFile) )</code> * <p> @@ -84,14 +88,16 @@ * against the most recent commit point on the database. * Regardless, each query will be issued against a read-only * transaction.</dt> - * <dt>bufferCapacity [#bytes]</dt> - * <dd>Specify the capacity of the buffers used to decouple the - * query evaluation from the consumption of the HTTP response by - * the clinet. The capacity may be specified in bytes or - * kilobytes, e.g., <code>5k</code>.</dd> * </dl> * </p> */ +// * To stop the server:<br/> +// * <code>port -stop</code><br/> +// * <dt>bufferCapacity [#bytes]</dt> +// * <dd>Specify the capacity of the buffers used to decouple the +// * query evaluation from the consumption of the HTTP response by +// * the client. The capacity may be specified in bytes or +// * kilobytes, e.g., <code>5k</code>.</dd> static public void main(String[] args) throws Exception { // PropertyConfigurator.configure("C:/CT_Config/ct_test_log4j.properties"); @@ -108,29 +114,29 @@ JettySparqlServer server = null; try { - /* - * First, handle the [port -stop] command, where "port" is the port - * number of the service. This case is a bit different because the - * "-stop" option appears after the "port" argument. - */ - if (args.length == 2) { - if ("-stop".equals(args[1])) { - final int port; - try { - port = Integer.valueOf(args[0]); - } catch (NumberFormatException ex) { - usage(1/* status */, "Could not parse as port# : '" + args[0] + "'"); - // keep the compiler happy wrt [port] being bound. - throw new AssertionError(); - } - // Send stop to server. - sendStop(port); - // Normal exit. - System.exit(0); - } else { - usage(1/* status */, null/* msg */); - } - } +// /* +// * First, handle the [port -stop] command, where "port" is the port +// * number of the service. This case is a bit different because the +// * "-stop" option appears after the "port" argument. +// */ +// if (args.length == 2) { +// if ("-stop".equals(args[1])) { +// final int port; +// try { +// port = Integer.valueOf(args[0]); +// } catch (NumberFormatException ex) { +// usage(1/* status */, "Could not parse as port# : '" + args[0] + "'"); +// // keep the compiler happy wrt [port] being bound. +// throw new AssertionError(); +// } +// // Send stop to server. +// sendStop(port); +// // Normal exit. 
+// System.exit(0); +// } else { +// usage(1/* status */, null/* msg */); +// } +// } /* * Now that we have that case out of the way, handle all arguments @@ -151,16 +157,16 @@ if (config.queryThreadPoolSize < 0) { usage(1/* status */, "-nthreads must be non-negative, not: " + s); } - } else if (arg.equals("-bufferCapacity")) { - final String s = args[++i]; - final long tmp = BytesUtil.getByteCount(s); - if (tmp < 1) { - usage(1/* status */, "-bufferCapacity must be non-negative, not: " + s); - } - if (tmp > Bytes.kilobyte32 * 100) { - usage(1/* status */, "-bufferCapacity must be less than 100kb, not: " + s); - } - config.bufferCapacity = (int) tmp; +// } else if (arg.equals("-bufferCapacity")) { +// final String s = args[++i]; +// final long tmp = BytesUtil.getByteCount(s); +// if (tmp < 1) { +// usage(1/* status */, "-bufferCapacity must be non-negative, not: " + s); +// } +// if (tmp > Bytes.kilobyte32 * 100) { +// usage(1/* status */, "-bufferCapacity must be less than 100kb, not: " + s); +// } +// config.bufferCapacity = (int) tmp; } else if (arg.equals("-readLock")) { final String s = args[++i]; readLock = Long.valueOf(s); @@ -446,10 +452,10 @@ System.err.println("[options] port namespace (propertyFile|configFile)"); - System.err.println("-OR-"); +// System.err.println("-OR-"); +// +// System.err.println("port -stop"); - System.err.println("port -stop"); - System.exit(status); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-04-08 13:02:36 UTC (rev 4381) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-04-08 18:52:23 UTC (rev 4382) @@ -96,6 +96,15 @@ if (log.isTraceEnabled()) log.trace("Running query: " + queryStr); + /* + * FIXME This needs to run on an ExecutorService with a configured + * thread pool size so we can avoid running too many queries + * concurrently. Please restore the logic for doing this with the + * thread pool scoped appropriately. All non-administrative REST Api + * tasks should adhere to this limit. The limit should not apply to + * normal http requests against non-API services. + */ + queryTask.call(); // Setup the response. 
Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java 2011-04-08 13:02:36 UTC (rev 4381) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java 2011-04-08 18:52:23 UTC (rev 4382) @@ -193,7 +193,7 @@ doc.closeAll(current); - buildResponse(resp, HTTP_OK, MIME_TEXT_PLAIN, doc.toString()); + buildResponse(resp, HTTP_OK, MIME_TEXT_HTML, doc.toString()); } catch (IOException e) { throw new RuntimeException(e); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestJettySparqlServer_StartStop.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestJettySparqlServer_StartStop.java 2011-04-08 13:02:36 UTC (rev 4381) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestJettySparqlServer_StartStop.java 2011-04-08 18:52:23 UTC (rev 4382) @@ -661,7 +661,8 @@ * UPDATE should not be allowed with a GET request */ public void test_GETUPDATE_withBody_NTRIPLES() throws Exception { - if (JettySparqlServer.directServletAccess) { +// if (JettySparqlServer.directServletAccess) + if(false) { HttpURLConnection conn = null; final URL url = new URL(m_serviceURL + "/update?data=stuff"); conn = (HttpURLConnection) url.openConnection();
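Regarding the FIXME in QueryServlet above: the dropped logic amounted to running API query tasks on a bounded pool rather than invoking them inline. A minimal sketch of that pattern, following the old NanoSparqlServer; queryThreadPoolSize and the variable names are illustrative, and exception handling is elided:

    // Bound query parallelism independently of HTTP connection parallelism.
    final ExecutorService queryService = Executors.newFixedThreadPool(
            config.queryThreadPoolSize,
            new DaemonThreadFactory("queryService"));

    // In the servlet, instead of queryTask.call(), queue the task and
    // block until the query completes.
    queryService.submit(queryTask).get();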
From: <tho...@us...> - 2011-04-14 13:55:42
Revision: 4398 http://bigdata.svn.sourceforge.net/bigdata/?rev=4398&view=rev Author: thompsonbry Date: 2011-04-14 13:55:29 +0000 (Thu, 14 Apr 2011) Log Message: ----------- Removed the old NanoSparqlServer class from the 'bench' package and its test suite. Restored the thread pool for query processing. Restored pipe for use with DELETE with QUERY to avoid materialization of the entire query result before deleting the data. Fixed the CONSTRUCT test. Modified the response to include the elapsed time as well as the #of statements modified for update operations. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFServletContextListener.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/CountersServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/DeleteServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/UpdateServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/XMLBuilder.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HTMLBuilder.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryType.java Removed Paths: ------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/bench/TestAll.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/bench/TestNanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/bench/TestNanoSparqlServer_StartStop.java Deleted: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlServer.java 2011-04-13 20:33:10 UTC (rev 4397) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlServer.java 2011-04-14 13:55:29 UTC (rev 4398) @@ -1,2801 +0,0 @@ -/* - -Copyright (C) SYSTAP, LLC 2006-2010. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. 
- -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -*/ -/* - * Created on May 29, 2010 - */ -package com.bigdata.rdf.sail.bench; - -import info.aduna.xml.XMLWriter; - -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; -import java.io.PrintWriter; -import java.net.HttpURLConnection; -import java.net.URL; -import java.util.Comparator; -import java.util.Date; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.TreeMap; -import java.util.UUID; -import java.util.Vector; -import java.util.concurrent.Callable; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.FutureTask; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; - -import org.apache.log4j.Logger; -import org.openrdf.model.Resource; -import org.openrdf.model.Statement; -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.parser.ParsedQuery; -import org.openrdf.query.parser.QueryParser; -import org.openrdf.query.parser.sparql.SPARQLParserFactory; -import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; -import org.openrdf.repository.RepositoryException; -import org.openrdf.rio.RDFFormat; -import org.openrdf.rio.RDFHandlerException; -import org.openrdf.rio.RDFParser; -import org.openrdf.rio.RDFParserFactory; -import org.openrdf.rio.RDFParserRegistry; -import org.openrdf.rio.helpers.RDFHandlerBase; -import org.openrdf.rio.rdfxml.RDFXMLParser; -import org.openrdf.rio.rdfxml.RDFXMLWriter; -import org.openrdf.sail.SailException; - -import com.bigdata.bop.BOpUtility; -import com.bigdata.bop.BufferAnnotations; -import com.bigdata.bop.IPredicate; -import com.bigdata.bop.engine.IRunningQuery; -import com.bigdata.bop.engine.QueryEngine; -import com.bigdata.bop.fed.QueryEngineFactory; -import com.bigdata.bop.join.PipelineJoin; -import com.bigdata.btree.BytesUtil; -import com.bigdata.btree.IndexMetadata; -import com.bigdata.counters.httpd.CounterSetHTTPD; -import com.bigdata.journal.IAtomicStore; -import com.bigdata.journal.IBufferStrategy; -import com.bigdata.journal.IIndexManager; -import com.bigdata.journal.ITransactionService; -import com.bigdata.journal.ITx; -import com.bigdata.journal.Journal; -import com.bigdata.journal.RWStrategy; -import com.bigdata.journal.TimestampUtility; -import com.bigdata.rawstore.Bytes; -import com.bigdata.rdf.sail.BigdataSail; -import com.bigdata.rdf.sail.BigdataSailGraphQuery; -import com.bigdata.rdf.sail.BigdataSailRepository; -import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; 
-import com.bigdata.rdf.sail.BigdataSailTupleQuery; -import com.bigdata.rdf.sail.BigdataSail.BigdataSailConnection; -import com.bigdata.rdf.sail.bench.NanoSparqlClient.QueryType; -import com.bigdata.rdf.store.AbstractTripleStore; -import com.bigdata.rdf.store.DataLoader; -import com.bigdata.relation.AbstractResource; -import com.bigdata.relation.RelationSchema; -import com.bigdata.rwstore.RWStore; -import com.bigdata.service.AbstractDistributedFederation; -import com.bigdata.service.IBigdataFederation; -import com.bigdata.service.jini.JiniClient; -import com.bigdata.sparse.ITPS; -import com.bigdata.util.concurrent.DaemonThreadFactory; -import com.bigdata.util.concurrent.ThreadPoolExecutorBaseStatisticsTask; -import com.bigdata.util.httpd.AbstractHTTPD; -import com.bigdata.util.httpd.NanoHTTPD; - -/** - * A flyweight SPARQL endpoint using HTTP. - * - * @author tho...@us... - * - * @todo Allow configuration options for the sparql endpoint either as URI - * parameters, in the property file, as request headers, or as query hints - * using the PREFIX mechanism. - * - * @todo Isn't there a baseURI option for SPARQL end points? - * - * @todo Add an "?explain" URL query parameter and show the execution plan and - * costs (or make this a navigable option from the set of running queries - * to drill into their running costs and offer an opportunity to kill them - * as well). - * - * @todo Add command to kill a running query, e.g., from the view of the long - * running queries. - * - * @todo Report other performance counters using {@link CounterSetHTTPD} - * - * @todo Simple update protocol. - * - * @todo If the addressed instance uses full transactions, then mutation should - * also use a full transaction. - * - * @todo Remote command to bulk load data from a remote or local resource (it's - * pretty much up to people handling deployment to secure access to - * queries, update requests, and bulk load requests). - * - * @todo Remote command to advance the read-behind point. This will let people - * bulk load a bunch of stuff before advancing queries to read from the - * new consistent commit point. - * - * @todo Review the settings for the {@link RDFParser} instances, e.g., - * verifyData, preserveBNodeIds, etc. Perhaps we should use the same - * defaults as the {@link DataLoader}? - * - * @todo It is possible that we could have concurrent requests which each get - * the unisolated connection. This could cause two problems: (1) we could - * exhaust our request pool, which would cause the server to block; and - * (2) I need to verify that the exclusive semaphore logic for the - * unisolated sail connection works with cross thread access. Someone had - * pointed out a bizarre hole in this.... - * - * @deprecated This has been replaced by the class of the same name in the - * <code>com.bigdata.sail.webapp</code> package. - */ -public class NanoSparqlServer extends AbstractHTTPD { - - /** - * The logger for the concrete {@link NanoSparqlServer} class. The {@link NanoHTTPD} - * class has its own logger. - */ - static private final Logger log = Logger.getLogger(NanoSparqlServer.class); - - /** - * A SPARQL results set in XML. - */ - static public final String MIME_SPARQL_RESULTS_XML = "application/sparql-results+xml"; - - /** - * RDF/XML. - */ - static public final String MIME_RDF_XML = "application/rdf+xml"; - - /** - * The character set used for the response (not negotiated). - */ - static private final String charset = "UTF-8"; - - /** - * The configuration object. 
- */
- private final Config config;
-
- /**
- * Provides access to the bigdata database.
- */
- private final IIndexManager indexManager;
-
- /**
- * @todo use to decide ASK, DESCRIBE, CONSTRUCT, SELECT, EXPLAIN, etc.
- */
- private final QueryParser engine;
-
- /**
- * Runs a pool of threads for handling requests.
- */
- private final ExecutorService queryService;
-
- private final LinkedBlockingQueue<byte[]> pipeBufferPool;
-
- /**
- * Metadata about running queries.
- */
- private static class RunningQuery {
-
- /**
- * The unique identifier for this query for the {@link NanoSparqlServer}.
- */
- final long queryId;
-
- /**
- * The unique identifier for this query for the {@link QueryEngine}.
- *
- * @see QueryEngine#getRunningQuery(UUID)
- */
- final UUID queryId2;
-
- /** The query. */
- final String query;
-
- /** The timestamp when the query was accepted (ns). */
- final long begin;
-
- public RunningQuery(final long queryId, final UUID queryId2,
- final String query, final long begin) {
-
- this.queryId = queryId;
-
- this.queryId2 = queryId2;
-
- this.query = query;
-
- this.begin = begin;
-
- }
-
- }
-
- /**
- * The currently executing queries (does not include queries where a client
- * has established a connection but the query is not running because the
- * {@link #queryService} is blocking).
- */
- private final ConcurrentHashMap<Long/* queryId */, RunningQuery> queries = new ConcurrentHashMap<Long, RunningQuery>();
-
- /**
- * Factory for the query identifiers.
- */
- private final AtomicLong queryIdFactory = new AtomicLong();
-
- /**
- *
- * @param config
- * The configuration for the server.
- * @param indexManager
- * The database instance that the server will operate against.
- *
- * @throws IOException
- * @throws SailException
- * @throws RepositoryException
- */
- public NanoSparqlServer(final Config config,
- final IIndexManager indexManager) throws IOException,
- SailException, RepositoryException {
-
- super(config.port);
-
- if (config.namespace == null)
- throw new IllegalArgumentException();
-
- if(indexManager == null)
- throw new IllegalArgumentException();
-
- this.config = config;
-
- this.indexManager = indexManager;
-
- // used to parse queries.
- engine = new SPARQLParserFactory().getParser();
-
- if (config.queryThreadPoolSize == 0) {
-
- queryService = (ThreadPoolExecutor) Executors
- .newCachedThreadPool(new DaemonThreadFactory
- (getClass().getName()+".queryService"));
-
- // no buffer pool since the #of requests is unbounded.
- pipeBufferPool = null;
-
- } else {
-
- queryService = (ThreadPoolExecutor) Executors.newFixedThreadPool(
- config.queryThreadPoolSize, new DaemonThreadFactory(
- getClass().getName() + ".queryService"));
-
- // create a buffer pool which is reused for each request.
- pipeBufferPool = new LinkedBlockingQueue<byte[]>(
- config.queryThreadPoolSize);
-
- for (int i = 0; i < config.queryThreadPoolSize; i++) {
-
- pipeBufferPool.add(new byte[config.bufferCapacity]);
-
- }
-
- }
-
- if (indexManager.getCollectQueueStatistics()) {
-
- final long initialDelay = 0; // initial delay in ms.
- final long delay = 1000; // delay in ms.
- final TimeUnit unit = TimeUnit.MILLISECONDS; - - queueSampleTask = new ThreadPoolExecutorBaseStatisticsTask( - (ThreadPoolExecutor) queryService); - - queueStatsFuture = indexManager.addScheduledTask(queueSampleTask, - initialDelay, delay, unit); - - } else { - - queueSampleTask = null; - - queueStatsFuture = null; - - } - - } - private final ScheduledFuture<?> queueStatsFuture; - private final ThreadPoolExecutorBaseStatisticsTask queueSampleTask; - - /** - * Return a list of the registered {@link AbstractTripleStore}s. - */ - protected List<String> getNamespaces() { - - // the triple store namespaces. - final List<String> namespaces = new LinkedList<String>(); - - // scan the relation schema in the global row store. - final Iterator<ITPS> itr = (Iterator<ITPS>) indexManager - .getGlobalRowStore().rangeIterator(RelationSchema.INSTANCE); - - while (itr.hasNext()) { - - // A timestamped property value set is a logical row with - // timestamped property values. - final ITPS tps = itr.next(); - - // If you want to see what is in the TPS, uncomment this. -// System.err.println(tps.toString()); - - // The namespace is the primary key of the logical row for the - // relation schema. - final String namespace = (String) tps.getPrimaryKey(); - - // Get the name of the implementation class - // (AbstractTripleStore, SPORelation, LexiconRelation, etc.) - final String className = (String) tps.get(RelationSchema.CLASS) - .getValue(); - - try { - final Class<?> cls = Class.forName(className); - if (AbstractTripleStore.class.isAssignableFrom(cls)) { - // this is a triple store (vs something else). - namespaces.add(namespace); - } - } catch (ClassNotFoundException e) { - log.error(e,e); - } - - } - - return namespaces; - - } - - /** - * Return various interesting metadata about the KB state. - * - * @todo The range counts can take some time if the cluster is heavily - * loaded since they must query each shard for the primary statement - * index and the TERM2ID index. - */ - protected StringBuilder getKBInfo(final String namespace, - final long timestamp) { - - final StringBuilder sb = new StringBuilder(); - - BigdataSailRepositoryConnection conn = null; - - try { - - conn = getQueryConnection(namespace, timestamp); - - final AbstractTripleStore tripleStore = conn.getTripleStore(); - - sb.append("class\t = " + tripleStore.getClass().getName() + "\n"); - - sb - .append("indexManager\t = " - + tripleStore.getIndexManager().getClass() - .getName() + "\n"); - - sb.append("namespace\t = " + tripleStore.getNamespace() + "\n"); - - sb.append("timestamp\t = " - + TimestampUtility.toString(tripleStore.getTimestamp()) - + "\n"); - - sb.append("statementCount\t = " + tripleStore.getStatementCount() - + "\n"); - - sb.append("termCount\t = " + tripleStore.getTermCount() + "\n"); - - sb.append("uriCount\t = " + tripleStore.getURICount() + "\n"); - - sb.append("literalCount\t = " + tripleStore.getLiteralCount() + "\n"); - - /* - * Note: The blank node count is only available when using the told - * bnodes mode. - */ - sb - .append("bnodeCount\t = " - + (tripleStore.getLexiconRelation() - .isStoreBlankNodes() ? 
"" - + tripleStore.getBNodeCount() : "N/A") - + "\n"); - - sb.append(IndexMetadata.Options.BTREE_BRANCHING_FACTOR - + "=" - + tripleStore.getSPORelation().getPrimaryIndex() - .getIndexMetadata().getBranchingFactor() + "\n"); - - sb.append(IndexMetadata.Options.WRITE_RETENTION_QUEUE_CAPACITY - + "=" - + tripleStore.getSPORelation().getPrimaryIndex() - .getIndexMetadata() - .getWriteRetentionQueueCapacity() + "\n"); - - sb.append(BigdataSail.Options.STAR_JOINS + "=" - + conn.getRepository().getSail().isStarJoins() + "\n"); - - sb.append("-- All properties.--\n"); - - // get the triple store's properties from the global row store. - final Map<String, Object> properties = indexManager - .getGlobalRowStore().read(RelationSchema.INSTANCE, - namespace); - - // write them out, - for (String key : properties.keySet()) { - sb.append(key + "=" + properties.get(key)+"\n"); - } - - /* - * And show some properties which can be inherited from - * AbstractResource. These have been mainly phased out in favor of - * BOP annotations, but there are a few places where they are still - * in use. - */ - - sb.append("-- Interesting AbstractResource effective properties --\n"); - - sb.append(AbstractResource.Options.CHUNK_CAPACITY + "=" - + tripleStore.getChunkCapacity() + "\n"); - - sb.append(AbstractResource.Options.CHUNK_OF_CHUNKS_CAPACITY + "=" - + tripleStore.getChunkOfChunksCapacity() + "\n"); - - sb.append(AbstractResource.Options.CHUNK_TIMEOUT + "=" - + tripleStore.getChunkTimeout() + "\n"); - - sb.append(AbstractResource.Options.FULLY_BUFFERED_READ_THRESHOLD + "=" - + tripleStore.getFullyBufferedReadThreshold() + "\n"); - - sb.append(AbstractResource.Options.MAX_PARALLEL_SUBQUERIES + "=" - + tripleStore.getMaxParallelSubqueries() + "\n"); - - /* - * And show some interesting effective properties for the KB, SPO - * relation, and lexicon relation. - */ - sb.append("-- Interesting KB effective properties --\n"); - - sb - .append(AbstractTripleStore.Options.TERM_CACHE_CAPACITY - + "=" - + tripleStore - .getLexiconRelation() - .getProperties() - .getProperty( - AbstractTripleStore.Options.TERM_CACHE_CAPACITY, - AbstractTripleStore.Options.DEFAULT_TERM_CACHE_CAPACITY) + "\n"); - - /* - * And show several interesting properties with their effective - * defaults. 
- */
-
- sb.append("-- Interesting Effective BOP Annotations --\n");
-
- sb.append(BufferAnnotations.CHUNK_CAPACITY
- + "="
- + tripleStore.getProperties().getProperty(
- BufferAnnotations.CHUNK_CAPACITY,
- "" + BufferAnnotations.DEFAULT_CHUNK_CAPACITY)
- + "\n");
-
- sb
- .append(BufferAnnotations.CHUNK_OF_CHUNKS_CAPACITY
- + "="
- + tripleStore
- .getProperties()
- .getProperty(
- BufferAnnotations.CHUNK_OF_CHUNKS_CAPACITY,
- ""
- + BufferAnnotations.DEFAULT_CHUNK_OF_CHUNKS_CAPACITY)
- + "\n");
-
- sb.append(BufferAnnotations.CHUNK_TIMEOUT
- + "="
- + tripleStore.getProperties().getProperty(
- BufferAnnotations.CHUNK_TIMEOUT,
- "" + BufferAnnotations.DEFAULT_CHUNK_TIMEOUT)
- + "\n");
-
- sb.append(PipelineJoin.Annotations.MAX_PARALLEL_CHUNKS
- + "="
- + tripleStore.getProperties().getProperty(
- PipelineJoin.Annotations.MAX_PARALLEL_CHUNKS,
- "" + PipelineJoin.Annotations.DEFAULT_MAX_PARALLEL_CHUNKS) + "\n");
-
- sb
- .append(IPredicate.Annotations.FULLY_BUFFERED_READ_THRESHOLD
- + "="
- + tripleStore
- .getProperties()
- .getProperty(
- IPredicate.Annotations.FULLY_BUFFERED_READ_THRESHOLD,
- ""
- + IPredicate.Annotations.DEFAULT_FULLY_BUFFERED_READ_THRESHOLD)
- + "\n");
-
- // sb.append(tripleStore.predicateUsage());
-
- if (tripleStore.getIndexManager() instanceof Journal) {
-
- final Journal journal = (Journal) tripleStore.getIndexManager();
-
- final IBufferStrategy strategy = journal.getBufferStrategy();
-
- if (strategy instanceof RWStrategy) {
-
- final RWStore store = ((RWStrategy) strategy).getRWStore();
-
- store.showAllocators(sb);
-
- }
-
- }
-
- } catch (Throwable t) {
-
- log.warn(t.getMessage(), t);
-
- } finally {
-
- if(conn != null) {
- try {
- conn.close();
- } catch (RepositoryException e) {
- log.error(e, e);
- }
-
- }
-
- }
-
- return sb;
-
- }
-
- /**
- * {@inheritDoc}
- * <p>
- * Overridden to wait until all running queries are complete before
- * shutting down.
- */
- @Override
- public void shutdown() {
- if(log.isInfoEnabled())
- log.info("Normal shutdown.");
- // Stop collecting queue statistics.
- if (queueStatsFuture != null)
- queueStatsFuture.cancel(true/* mayInterruptIfRunning */);
- // Tell NanoHTTP to stop accepting new requests.
- super.shutdown();
- // Stop servicing new requests.
- queryService.shutdown();
- try {
- queryService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
- } catch (InterruptedException ex) {
- throw new RuntimeException(ex);
- }
- /*
- * Note: This is using the atomic boolean as a lock in addition to
- * relying on its visibility guarantee.
- */
- synchronized (alive) {
- alive.set(false);
- alive.notifyAll();
- }
- }
-
- /**
- * {@inheritDoc}
- * <p>
- * Overridden to interrupt all running requests.
- *
- * FIXME Must abort any open transactions. This does not matter for the
- * standalone database, but it will make a difference in scale-out. The
- * transaction identifiers could be obtained from the {@link #queries}
- * map.
- */
- @Override
- public void shutdownNow() {
- if(log.isInfoEnabled())
- log.info("Immediate shutdown.");
- // Stop collecting queue statistics.
- if (queueStatsFuture != null)
- queueStatsFuture.cancel(true/* mayInterruptIfRunning */);
- // Immediately stop accepting connections and interrupt open requests.
- super.shutdownNow();
- // Interrupt all running queries.
- queryService.shutdownNow();
- /*
- * Note: This is using the atomic boolean as a lock in addition to
- * relying on its visibility guarantee.
- */ - synchronized (alive) { - alive.set(false); - alive.notifyAll(); - } - } - - /** - * Note: This uses an atomic boolean in order to give us a synchronization - * object whose state also serves as a condition variable. findbugs objects, - * but this is a deliberate usage. - */ - private final AtomicBoolean alive = new AtomicBoolean(true); - - /** - * <p> - * Perform an HTTP-POST, which corresponds to the basic CRUD operation - * "create" according to the generic interaction semantics of HTTP REST. The - * operation will be executed against the target namespace per the URI. - * </p> - * - * <pre> - * POST [/namespace/NAMESPACE] - * ... - * Content-Type: - * ... - * - * BODY - * </pre> - * <p> - * Where <code>BODY</code> is the new RDF content using the representation - * indicated by the <code>Content-Type</code>. - * </p> - * <p> - * -OR- - * </p> - * - * <pre> - * POST [/namespace/NAMESPACE] ?uri=URL - * </pre> - * <p> - * Where <code>URI</code> identifies a resource whose RDF content will be - * inserted into the database. The <code>uri</code> query parameter may - * occur multiple times. All identified resources will be loaded within a - * single native transaction. Bigdata provides snapshot isolation so you can - * continue to execute queries against the last commit point while this - * operation is executed. - * </p> - * - * <p> - * You can shutdown the server using: - * </p> - * - * <pre> - * POST /stop - * </pre> - * - * <p> - * A status page is available: - * </p> - * - * <pre> - * POST /status - * </pre> - */ - @Override - public Response doPost(final Request req) throws Exception { - - final String uri = req.uri; - - if("/stop".equals(uri)) { - - return doStop(req); - - } - - if("/status".equals(uri)) { - - return doStatus(req); - - } - - final String queryStr = getQueryStr(req.params); - - if (queryStr != null) { - - return doQuery(req); - - } - - final String contentType = req.getContentType(); - - if (contentType != null) { - - return doPostWithBody(req); - - } - - if (req.params.get("uri") != null) { - - return doPostWithURIs(req); - - } - - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, uri); - - } - - /** - * POST with request body containing statements to be inserted. - * - * @param req - * The request. - * - * @return The response. - * - * @throws Exception - */ - private Response doPostWithBody(final Request req) throws Exception { - - final String baseURI = "";// @todo baseURI query parameter? - - final String namespace = getNamespace(req.uri); - - final String contentType = req.getContentType(); - - if (contentType == null) - throw new UnsupportedOperationException(); - - if (log.isInfoEnabled()) - log.info("Request body: " + contentType); - - final RDFFormat format = RDFFormat.forMIMEType(contentType); - - if (format == null) { - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Content-Type not recognized as RDF: " - + contentType); - } - - if (log.isInfoEnabled()) - log.info("RDFFormat=" + format); - - final RDFParserFactory rdfParserFactory = RDFParserRegistry - .getInstance().get(format); - - if (rdfParserFactory == null) { - return new Response(HTTP_INTERNALERROR, MIME_TEXT_PLAIN, - "Parser not found: Content-Type=" + contentType); - } - - try { - - // resolve the default namespace. 
- final AbstractTripleStore tripleStore = (AbstractTripleStore) indexManager - .getResourceLocator().locate(namespace, ITx.UNISOLATED); - - if (tripleStore == null) - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Not found: namespace=" + namespace); - - final AtomicLong nmodified = new AtomicLong(0L); - - // Wrap with SAIL. - final BigdataSail sail = new BigdataSail(tripleStore); - BigdataSailConnection conn = null; - try { - - sail.initialize(); - conn = sail.getConnection(); - - /* - * There is a request body, so let's try and parse it. - */ - - final RDFParser rdfParser = rdfParserFactory.getParser(); - - rdfParser.setValueFactory(tripleStore.getValueFactory()); - - rdfParser.setVerifyData(true); - - rdfParser.setStopAtFirstError(true); - - rdfParser - .setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); - - rdfParser.setRDFHandler(new AddStatementHandler(conn,nmodified)); - - /* - * Run the parser, which will cause statements to be inserted. - */ - rdfParser.parse(req.getInputStream(), baseURI); - - // Commit the mutation. - conn.commit(); - - return new Response(HTTP_OK, MIME_TEXT_PLAIN, nmodified.get() - + " statements modified."); - - } finally { - - if (conn != null) - conn.close(); - -// sail.shutDown(); - - } - - } catch (Exception ex) { - - // Will be rendered as an INTERNAL_ERROR. - throw new RuntimeException(ex); - - } - - } - - /** - * Helper class adds statements to the sail as they are visited by a parser. - */ - private static class AddStatementHandler extends RDFHandlerBase { - - private final BigdataSailConnection conn; - private final AtomicLong nmodified; - - public AddStatementHandler(final BigdataSailConnection conn, - final AtomicLong nmodified) { - this.conn = conn; - this.nmodified = nmodified; - } - - public void handleStatement(Statement stmt) throws RDFHandlerException { - - try { - - conn.addStatement(// - stmt.getSubject(), // - stmt.getPredicate(), // - stmt.getObject(), // - (Resource[]) (stmt.getContext() == null ? null - : new Resource[] { stmt.getContext() })// - ); - - } catch (SailException e) { - - throw new RDFHandlerException(e); - - } - - nmodified.incrementAndGet(); - - } - - } - - /** - * Helper class removes statements from the sail as they are visited by a parser. - */ - private static class RemoveStatementHandler extends RDFHandlerBase { - - private final BigdataSailConnection conn; - private final AtomicLong nmodified; - - public RemoveStatementHandler(final BigdataSailConnection conn, - final AtomicLong nmodified) { - this.conn = conn; - this.nmodified = nmodified; - } - - public void handleStatement(Statement stmt) throws RDFHandlerException { - - try { - - conn.removeStatements(// - stmt.getSubject(), // - stmt.getPredicate(), // - stmt.getObject(), // - (Resource[]) (stmt.getContext() == null ? null - : new Resource[] { stmt.getContext() })// - ); - - } catch (SailException e) { - - throw new RDFHandlerException(e); - - } - - nmodified.incrementAndGet(); - - } - - } - - /** - * POST with URIs of resources to be inserted. - * - * @param req - * The request. - * - * @return The response. 
- * - * @throws Exception - */ - private Response doPostWithURIs(final Request req) throws Exception { - - final String namespace = getNamespace(req.uri); - - final String contentType = req.getContentType(); - - final Vector<String> uris = req.params.get("uri"); - - if (uris == null) - throw new UnsupportedOperationException(); - - if (uris.isEmpty()) - return new Response(HTTP_OK, MIME_TEXT_PLAIN, - "0 statements modified"); - - if (log.isInfoEnabled()) - log.info("URIs: " + uris); - - // Before we do anything, make sure we have valid URLs. - final Vector<URL> urls = new Vector<URL>(uris.size()); - for (String uri : uris) { - urls.add(new URL(uri)); - } - - try { - - // resolve the default namespace. - final AbstractTripleStore tripleStore = (AbstractTripleStore) indexManager - .getResourceLocator().locate(namespace, ITx.UNISOLATED); - - if (tripleStore == null) - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Not found: namespace=" + namespace); - - final AtomicLong nmodified = new AtomicLong(0L); - - // Wrap with SAIL. - final BigdataSail sail = new BigdataSail(tripleStore); - BigdataSailConnection conn = null; - try { - - conn = sail.getConnection(); - - for (URL url : urls) { - - HttpURLConnection hconn = null; - try { - - hconn = (HttpURLConnection) url.openConnection(); - hconn.setRequestMethod(NanoHTTPD.GET); - hconn.setReadTimeout(0);// no timeout? http param? - - /* - * There is a request body, so let's try and parse it. - */ - - final RDFFormat format = RDFFormat - .forMIMEType(contentType); - - if (format == null) { - return new Response(HTTP_BADREQUEST, - MIME_TEXT_PLAIN, - "Content-Type not recognized as RDF: " - + contentType); - } - - final RDFParserFactory rdfParserFactory = RDFParserRegistry - .getInstance().get(format); - - if (rdfParserFactory == null) { - return new Response(HTTP_INTERNALERROR, - MIME_TEXT_PLAIN, - "Parser not found: Content-Type=" - + contentType); - } - - final RDFParser rdfParser = rdfParserFactory - .getParser(); - - rdfParser - .setValueFactory(tripleStore.getValueFactory()); - - rdfParser.setVerifyData(true); - - rdfParser.setStopAtFirstError(true); - - rdfParser - .setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); - - rdfParser.setRDFHandler(new AddStatementHandler(conn, nmodified)); - - /* - * Run the parser, which will cause statements to be - * inserted. - */ - - rdfParser.parse(req.getInputStream(), url - .toExternalForm()/* baseURL */); - - } finally { - - if (hconn != null) - hconn.disconnect(); - - } // next URI. - - } - - // Commit the mutation. - conn.commit(); - - return new Response(HTTP_OK, MIME_TEXT_PLAIN, nmodified.get() - + " statements modified."); - - } finally { - - if (conn != null) - conn.close(); - -// sail.shutDown(); - - } - - } catch (Exception ex) { - - // Will be rendered as an INTERNAL_ERROR. - throw new RuntimeException(ex); - - } - - } - - /** - * Halt the server. - * - * @param req - * The request. - * - * @return The response. - * - * @throws Exception - */ - private Response doStop(final Request req) throws Exception { - - /* - * Create a new thread to run shutdown since we do not want this to - * block on the queryService. - */ - final Thread t = new Thread(new Runnable() { - - public void run() { - - log.warn("Will shutdown."); - - try { - - /* - * Sleep for a bit so the Response will be delivered - * before we shutdown the server. - */ - - Thread.sleep(100/* ms */); - - } catch (InterruptedException ex) { - - // ignore - - } - - // Shutdown the server. 
- shutdown();
-
- }
-
- });
-
- t.setDaemon(true);
-
- // Start the shutdown thread.
- t.start();
-
-// // Shutdown.
-// shutdown();
-
- /*
- * Note: Client might not see this response since the shutdown thread
- * may have already terminated the httpd service.
- */
- return new Response(HTTP_OK, MIME_TEXT_PLAIN, "Shutting down.");
-
- }
-
- /**
- * Accepts SPARQL queries.
- *
- * <pre>
- * GET [/namespace/NAMESPACE] ?query=QUERY
- * </pre>
- *
- * Where <code>QUERY</code> is the SPARQL query.
- */
- @Override
- public Response doGet(final Request req) throws Exception {
-
- final String uri = req.uri;
-
- if("/status".equals(uri)) {
-
- return doStatus(req);
-
- }
-
- final String queryStr = getQueryStr(req.params);
-
- if (queryStr != null) {
-
- return doQuery(req);
-
- }
-
- return new Response(HTTP_NOTFOUND, MIME_TEXT_PLAIN, uri);
-
- }
-
-// /**
-// * TODO Perform an HTTP-PUT, which corresponds to the basic CRUD operation
-// * "update" according to the generic interaction semantics of HTTP REST.
-// *
-// */
-// @Override
-// public Response doPut(final Request req) {
-//
-// return new Response(HTTP_NOTFOUND, MIME_TEXT_PLAIN, req.uri);
-//
-// }
-
- /**
- * REST DELETE. There are two forms for this operation.
- *
- * <pre>
- * DELETE [/namespace/NAMESPACE]
- * ...
- * Content-Type
- * ...
- *
- * BODY
- *
- * </pre>
- * <p>
- * BODY contains RDF statements according to the specified Content-Type.
- * Statements parsed from the BODY are deleted from the addressed namespace.
- * </p>
- * <p>
- * -OR-
- * </p>
- *
- * <pre>
- * DELETE [/namespace/NAMESPACE] ?query=...
- * </pre>
- * <p>
- * Where <code>query</code> is a CONSTRUCT or DESCRIBE query. Statements
- * materialized by the query against the addressed namespace are deleted
- * from that namespace.
- * </p>
- */
- @Override
- public Response doDelete(final Request req) {
-
- final String contentType = req.getContentType();
-
- final String queryStr = getQueryStr(req.params);
-
- if(contentType != null) {
-
- return doDeleteWithBody(req);
-
- } else if (queryStr != null) {
-
- return doDeleteWithQuery(req);
-
- }
-
- return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, "");
-
- }
-
- /**
- * Delete all statements materialized by a DESCRIBE or CONSTRUCT query.
- * <p>
- * Note: To avoid materializing the statements, this runs the query against
- * the last commit time. This is done while it is holding the unisolated
- * connection which prevents concurrent modifications. Therefore the entire
- * SELECT + DELETE operation is ACID.
- */
- private Response doDeleteWithQuery(final Request req) {
-
- final String baseURI = "";// @todo baseURI query parameter?
-
- final String namespace = getNamespace(req.uri);
-
- final String queryStr = getQueryStr(req.params);
-
- if(queryStr == null)
- throw new UnsupportedOperationException();
-
- if (log.isInfoEnabled())
- log.info("delete with query: "+queryStr);
-
- try {
-
- // resolve the default namespace.
- final AbstractTripleStore tripleStore = (AbstractTripleStore) indexManager
- .getResourceLocator().locate(namespace, ITx.UNISOLATED);
-
- if (tripleStore == null)
- return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN,
- "Not found: namespace=" + namespace);
-
- /*
- * Note: pipe is drained by this thread to consume the query
- * results, which are the statements to be deleted.
- */ - final PipedOutputStream os = new PipedOutputStream(); - final InputStream is = newPipedInputStream(os); - try { - - final AbstractQueryTask queryTask = getQueryTask(namespace, - ITx.READ_COMMITTED, queryStr, req.params, req.headers, - os); - - switch (queryTask.queryType) { - case DESCRIBE: - case CONSTRUCT: - break; - default: - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Must be DESCRIBE or CONSTRUCT query."); - } - - final AtomicLong nmodified = new AtomicLong(0L); - - // Wrap with SAIL. - final BigdataSail sail = new BigdataSail(tripleStore); - BigdataSailConnection conn = null; - try { - - sail.initialize(); - - // get the unisolated connection. - conn = sail.getConnection(); - - final RDFXMLParser rdfParser = new RDFXMLParser( - tripleStore.getValueFactory()); - - rdfParser.setVerifyData(false); - - rdfParser.setStopAtFirstError(true); - - rdfParser - .setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); - - rdfParser.setRDFHandler(new RemoveStatementHandler(conn, nmodified)); - - /* - * Run the parser, which will cause statements to be - * deleted. - */ - rdfParser.parse(is, baseURI); - - // Commit the mutation. - conn.commit(); - - } finally { - - if (conn != null) - conn.close(); - -// sail.shutDown(); - - } - - return new Response(HTTP_OK, MIME_TEXT_PLAIN, nmodified.get() - + " statements modified."); - - } catch (Throwable t) { - - throw launderThrowable(t, os, queryStr); - - } - - } catch (Exception ex) { - - // Will be rendered as an INTERNAL_ERROR. - throw new RuntimeException(ex); - - } - - } - - /** - * DELETE request with a request body containing the statements to be - * removed. - */ - private Response doDeleteWithBody(final Request req) { - - final String baseURI = "";// @todo baseURI query parameter? - - final String namespace = getNamespace(req.uri); - - final String contentType = req.getContentType(); - - if (contentType == null) - throw new UnsupportedOperationException(); - - if (log.isInfoEnabled()) - log.info("Request body: " + contentType); - - try { - - // resolve the default namespace. - final AbstractTripleStore tripleStore = (AbstractTripleStore) indexManager - .getResourceLocator().locate(namespace, ITx.UNISOLATED); - - if (tripleStore == null) - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Not found: namespace=" + namespace); - - final AtomicLong nmodified = new AtomicLong(0L); - - // Wrap with SAIL. - final BigdataSail sail = new BigdataSail(tripleStore); - BigdataSailConnection conn = null; - try { - - sail.initialize(); - conn = sail.getConnection(); - - /* - * There is a request body, so let's try and parse it. - */ - - final RDFFormat format = RDFFormat.forMIMEType(contentType); - - if (format == null) { - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Content-Type not recognized as RDF: " - + contentType); - } - - final RDFParserFactory rdfParserFactory = RDFParserRegistry - .getInstance().get(format); - - if (rdfParserFactory == null) { - return new Response(HTTP_INTERNALERROR, MIME_TEXT_PLAIN, - "Parser not found: Content-Type=" + contentType); - } - - final RDFParser rdfParser = rdfParserFactory.getParser(); - - rdfParser.setValueFactory(tripleStore.getValueFactory()); - - rdfParser.setVerifyData(true); - - rdfParser.setStopAtFirstError(true); - - rdfParser - .setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); - - rdfParser.setRDFHandler(new RemoveStatementHandler(conn, - nmodified)); - - /* - * Run the parser, which will cause statements to be deleted. 
- */ - rdfParser.parse(req.getInputStream(), baseURI); - - // Commit the mutation. - conn.commit(); - - return new Response(HTTP_OK, MIME_TEXT_PLAIN, nmodified.get() - + " statements modified."); - - } finally { - - if (conn != null) - conn.close(); - -// sail.shutDown(); - - } - - } catch (Exception ex) { - - // Will be rendered as an INTERNAL_ERROR. - throw new RuntimeException(ex); - - } - - } - - /** - * Return the namespace which will be used to execute the query. The - * namespace is represented by the first component of the URI. If there is - * no namespace, then return the configured default namespace. - * - * @param uri - * The URI path string. - * - * @return The namespace. - */ - private String getNamespace(final String uri) { - -// // locate the "//" after the protocol. -// final int index = uri.indexOf("//"); - - if(!uri.startsWith("/namespace/")) { - // use the default namespace. - return config.namespace; - } - - // locate the next "/" in the URI path. - final int beginIndex = uri.indexOf('/', 1/* fromIndex */); - - // locate the next "/" in the URI path. - int endIndex = uri.indexOf('/', beginIndex + 1/* fromIndex */); - - if (endIndex == -1) { - // use the rest of the URI. - endIndex = uri.length(); - } - - // return the namespace. - return uri.substring(beginIndex + 1, endIndex); - - } - - /** - * Return the timestamp which will be used to execute the query. The uri - * query parameter <code>timestamp</code> may be used to communicate the - * desired commit time against which the query will be issued. If that uri - * query parameter is not given then the default configured commit time will - * be used. Applications may create protocols for sharing interesting commit - * times as reported by {@link IAtomicStore#commit()} or by a distributed - * data loader (for scale-out). - * - * @todo the configured timestamp should only be used for the default - * namespace (or it should be configured for each graph explicitly, or - * we should bundle the (namespace,timestamp) together as a single - * object). - */ - private long getTimestamp(final String uri, - final LinkedHashMap<String, Vector<String>> params) { - - final Vector<String> tmp = params.get("timestamp"); - - if (tmp == null || tmp.size() == 0 || tmp.get(0) == null) { - - return config.timestamp; - - } - - final String val = tmp.get(0); - - return Long.valueOf(val); - - } - - /** - * Respond to a status request. - * - * @param uri - * @param method - * @param header - * @param params - * @return - * @throws Exception - * - * @todo add statistics for top-N queries based on query template - * identifiers, which can be communicated using query hints. See // - * wait for the subquery. - */ - private Response doStatus(final Request req) throws Exception { - - // SPARQL queries accepted by the SPARQL end point. - final boolean showQueries = req.params.get("showQueries") != null; - - // IRunningQuery objects currently running on the query controller. - final boolean showRunningQueries = req.params.get("showRunningQueries") != null; - - // Information about the KB (stats, properties). - final boolean showKBInfo = req.params.get("showKBInfo") != null; - - // bigdata namespaces known to the index manager. 
- final boolean showNamespaces = req.params.get("showNamespaces") != null; - - final StringBuilder sb = new StringBuilder(); - - sb.append("Accepted query count=" + queryIdFactory.get() + "\n"); - - sb.append("Running query count=" + queries.size() + "\n"); - - if (showNamespaces) { - - final List<String> namespaces = getNamespaces(); - - sb.append("Namespaces: "); - - for (String s : namespaces) { - - sb.append(s); - - } - - sb.append("\n"); - - } - - if (showKBInfo) { - - // General information on the connected kb. - sb.append(getKBInfo(getNamespace(req.uri), getTimestamp(req.uri, - req.params))); - - } - - if(queueSampleTask != null) { - - // Performance counters for the NSS queries. - sb.append(queueSampleTask.getCounters().toString()); - - } - -// if (indexManager instanceof IJournal) { -// -// /* -// * Stuff which is specific to a local/embedded database. -// */ -// -// final AbstractJournal jnl = (AbstractJournal) indexManager; -// -// sb.append("file\t= " + jnl.getFile() + "\n"); -// -// sb.append("BufferMode\t= " -// + jnl.getBufferStrategy().getBufferMode() + "\n"); -// -// sb.append("nextOffset\t= " + jnl.getRootBlockView().getNextOffset() -// + "\n"); -// -// if (LRUNexus.INSTANCE != null) { -// -// sb.append(LRUNexus.Options.CLASS + "=" -// + LRUNexus.INSTANCE.toString().getClass() + "\n"); -// -// sb.append(LRUNexus.INSTANCE.toString() + "\n"); -// -// } else { -// -// sb.append("LRUNexus is disabled."); -// -// } -// -// // show the disk access details. -// sb.append(jnl.getBufferStrategy().getCounters().toString()+"\n"); -// -// } - - if(showQueries) { - - /* - * Show the queries which are currently executing (accepted by the NanoSparqlServer). - */ - - sb.append("\n"); - - final long now = System.nanoTime(); - - final TreeMap<Long, RunningQuery> ages = new TreeMap<Long, RunningQuery>(new Comparator<Long>() { - /** - * Comparator puts the entries into descending order by the query - * execution time (longest running queries are first). - */ - public int compare(final Long o1, final Long o2) { - if(o1.longValue()<o2.longValue()) return 1; - if(o1.longValue()>o2.longValue()) return -1; - return 0; - } - }); - - { - - final Iterator<RunningQuery> itr = queries.values().iterator(); - - while (itr.hasNext()) { - - final RunningQuery query = itr.next(); - - final long age = now - query.begin; - - ages.put(age, query); - - } - - } - - { - - final Iterator<RunningQuery> itr = ages.values().iterator(); - - while (itr.hasNext()) { - - final RunningQuery query = itr.next(); - - final long age = now - query.begin; - - sb.append("age=" - + java.util.concurrent.TimeUnit.NANOSECONDS - .toMillis(age) + "ms, queryId=" - + query.queryId + "\n"); - sb.append(query.query + "\n"); - - } - - } - - } - - if(showRunningQueries) { - - /* - * Show the queries which are currently executing (actually running - * on the QueryEngine). - */ - - sb.append("\n"); - - final QueryEngine queryEngine = (QueryEngine) QueryEngineFactory - .getQueryController(indexManager); - - final UUID[] queryIds = queryEngine.getRunningQueries(); - -// final long now = System.nanoTime(); - - final TreeMap<Long, IRunningQuery> ages = new TreeMap<Long, IRunningQuery>(new Comparator<Long>() { - /** - * Comparator puts the entries into descending order by the query - * execution time (longest running queries are first). 
- */ - public int compare(final Long o1, final Long o2) { - if(o1.longValue()<o2.longValue()) return 1; - if(o1.longValue()>o2.longValue()) return -1; - return 0; - } - }); - - for(UUID queryId : queryIds) { - - final IRunningQuery query = queryEngine - .getRunningQuery(queryId); - - if (query == null) { - // Already terminated. - continue; - } - - ages.put(query.getElapsed(), query); - - } - - { - - final Iterator<IRunningQuery> itr = ages.values().iterator(); - - while (itr.hasNext()) { - - final IRunningQuery query = itr.next(); - - if (query.isDone() && query.getCause() != null) { - // Already terminated (normal completion). - continue; - } - - /* - * @todo The runstate and stats could be formatted into an - * HTML table ala QueryLog or RunState. - */ - sb.append("age=" + query.getElapsed() + "ms\n"); - sb.append("queryId=" + query.getQueryId() + "\n"); - sb.append(query.toString()); - sb.append("\n"); - sb.append(BOpUtility.toString(query.getQuery())); - sb.append("\n"); - sb.append("\n"); - -// final long age = query.getElapsed(); -// sb.append("age=" -// + java.util.concurrent.TimeUnit.NANOSECONDS -// .toMillis(age) + "ms, queryId=" -// + query.getQueryId() + "\nquery=" -// + BOpUtility.toString(query.getQuery()) + "\n"); - - } - - } - - } - - return new Response(HTTP_OK, MIME_TEXT_PLAIN, sb.toString()); - - } - - /** - * Answer a SPARQL query. - * - * @param uri - * @param method - * @param header - * @param params - * @return - * @throws Exception - */ - public Response doQuery(final Request req) throws Exception { - - final String namespace = getNamespace(req.uri); - - final long timestamp = getTimestamp(req.uri, req.params); - - final String queryStr = getQueryStr(req.params); - - if (queryStr == null) - return new Response(HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Specify query using ?query=...."); - - /* - * Setup pipes. The [os] will be passed into the task that executes the - * query. The [is] will be passed into the Response. The task is - * executed on a thread pool. - * - * Note: If the client closes the connection, then the InputStream - * passed into the Response will be closed and the task will terminate - * rather than running on in the background with a disconnected client. - */ - final PipedOutputStream os = new PipedOutputStream(); - final InputStream is = newPipedInputStream(os); - try { - - final AbstractQueryTask queryTask = getQueryTask(namespace, timestamp, - queryStr, req.params, req.headers, os); - - final FutureTask<Void> ft = new FutureTask<Void>(queryTask); - - // Setup the response. - // TODO Move charset choice into conneg logic. - final Response r = new Response(HTTP_OK, queryTask.mimeType - + "; charset='" + charset + "'", is); - - if (log.isTraceEnabled()) - log.trace("Will run query: " + queryStr); - - // Begin executing the query (asynchronous). - queryService.execute(ft); - - /* - * Sets the cache behavior. - */ - // r.addHeader("Cache-Control", - // "max-age=60, must-revalidate, public"); - // to disable caching. - // r.addHeader("Cache-Control", "no-cache"); - - return r; - - } catch (Throwable e) { - - throw launderThrowable(e, os, queryStr); - - } - - } - - /** - * Return the query string. - * - * @param params - * - * @return The query string -or- <code>null</code> if none was specified. 
- */
- private String getQueryStr(final Map<String, Vector<String>> params) {
-
- final String queryStr;
-
- final Vector<String> tmp = params.get("query");
-
- if (tmp == null || tmp.isEmpty() || tmp.get(0) == null) {
- queryStr = null;
- } else {
- queryStr = tmp.get(0);
-
- if (log.isDebugEnabled())
- log.debug("query: " + queryStr);
-
- }
-
- return queryStr;
-
- }
-
- /**
- * Class reuses a pool of buffers for each pipe. This is a significant
- * performance win.
- *
- * @see NanoSparqlServer#pipeBufferPool
- */
- private class MyPipedInputStream extends PipedInputStream {
-
- MyPipedInputStream(final PipedOutputStream os) throws IOException,
- InterruptedException {
-
- super(os, 1/* size */);
-
- // override the buffer.
- this.buffer = pipeBufferPool.take();
-
- }
-
- public void close() throws IOException {
-
- super.close();
-
- // return the buffer to the pool.
- pipeBufferPoo... [truncated message content] |
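The GET protocol documented above reduces to a single URL-encoded "query" parameter against the endpoint. A minimal client sketch in Java follows; the host, port, and namespace values are illustrative assumptions only and are not taken from this commit:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class NanoSparqlClientSketch {

    public static void main(final String[] args) throws Exception {

        final String query = "SELECT * WHERE { ?s ?p ?o } LIMIT 10";

        // GET [/namespace/NAMESPACE] ?query=QUERY per the javadoc above.
        final URL url = new URL("http://localhost:8080/namespace/kb?query="
                + URLEncoder.encode(query, "UTF-8"));

        final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try {
            conn.setRequestMethod("GET");
            // SELECT results come back as a SPARQL result set in XML.
            conn.setRequestProperty("Accept", "application/sparql-results+xml");
            final BufferedReader r = new BufferedReader(new InputStreamReader(
                    conn.getInputStream(), "UTF-8"));
            String line;
            while ((line = r.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            // terminate the http connection.
            conn.disconnect();
        }
    }
}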
From: <tho...@us...> - 2011-04-14 14:05:07
|
Revision: 4399 http://bigdata.svn.sourceforge.net/bigdata/?rev=4399&view=rev Author: thompsonbry Date: 2011-04-14 14:04:58 +0000 (Thu, 14 Apr 2011) Log Message: ----------- Modified NSS to use the default thread pool size by default. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java 2011-04-14 13:55:29 UTC (rev 4398) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java 2011-04-14 14:04:58 UTC (rev 4399) @@ -79,60 +79,61 @@ // static private final Logger log = Logger.getLogger(NanoSparqlServer.class); - /** - * Run an httpd service exposing a SPARQL endpoint. The service will respond - * to the following URL paths: - * <dl> - * <dt>http://localhost:port/</dt> - * <dd>The SPARQL end point for the default namespace as specified by the - * <code>namespace</code> command line argument.</dd> - * <dt>http://localhost:port/namespace/NAMESPACE</dt> - * <dd>where <code>NAMESPACE</code> is the namespace of some triple store or - * quad store, may be used to address ANY triple or quads store in the - * bigdata instance.</dd> - * <dt>http://localhost:port/status</dt> - * <dd>A status page.</dd> - * </dl> - * - * @param args - * USAGE:<br/> - * To start the server:<br/> - * <code>(options) <i>namespace</i> (propertyFile|configFile) )</code> - * <p> - * <i>Where:</i> - * <dl> - * <dt>port</dt> - * <dd>The port on which the service will respond -or- - * <code>0</code> to use any open port.</dd> - * <dt>namespace</dt> - * <dd>The namespace of the default SPARQL endpoint (the - * namespace will be <code>kb</code> if none was specified when - * the triple/quad store was created).</dd> - * <dt>propertyFile</dt> - * <dd>A java properties file for a standalone {@link Journal}.</dd> - * <dt>configFile</dt> - * <dd>A jini configuration file for a bigdata federation.</dd> - * </dl> - * and <i>options</i> are any of: - * <dl> - * <dt>-nthreads</dt> - * <dd>The #of threads which will be used to answer SPARQL - * queries (default 8).</dd> - * <dt>-forceOverflow</dt> - * <dd>Force a compacting merge of all shards on all data - * services in a bigdata federation (this option should only be - * used for benchmarking purposes).</dd> - * <dt>readLock</dt> - * <dd>The commit time against which the server will assert a - * read lock by holding open a read-only transaction against that - * commit point. When given, queries will default to read against - * this commit point. Otherwise queries will default to read - * against the most recent commit point on the database. - * Regardless, each query will be issued against a read-only - * transaction.</dt> - * </dl> - * </p> - */ + /** + * Run an httpd service exposing a SPARQL endpoint. 
The service will respond
+ * to the following URL paths:
+ * <dl>
+ * <dt>http://localhost:port/</dt>
+ * <dd>The SPARQL end point for the default namespace as specified by the
+ * <code>namespace</code> command line argument.</dd>
+ * <dt>http://localhost:port/namespace/NAMESPACE</dt>
+ * <dd>where <code>NAMESPACE</code> is the namespace of some triple store or
+ * quad store, may be used to address ANY triple or quads store in the
+ * bigdata instance.</dd>
+ * <dt>http://localhost:port/status</dt>
+ * <dd>A status page.</dd>
+ * </dl>
+ *
+ * @param args
+ * USAGE:<br/>
+ * To start the server:<br/>
+ * <code>(options) <i>namespace</i> (propertyFile|configFile) )</code>
+ * <p>
+ * <i>Where:</i>
+ * <dl>
+ * <dt>port</dt>
+ * <dd>The port on which the service will respond -or-
+ * <code>0</code> to use any open port.</dd>
+ * <dt>namespace</dt>
+ * <dd>The namespace of the default SPARQL endpoint (the
+ * namespace will be <code>kb</code> if none was specified when
+ * the triple/quad store was created).</dd>
+ * <dt>propertyFile</dt>
+ * <dd>A java properties file for a standalone {@link Journal}.</dd>
+ * <dt>configFile</dt>
+ * <dd>A jini configuration file for a bigdata federation.</dd>
+ * </dl>
+ * and <i>options</i> are any of:
+ * <dl>
+ * <dt>-nthreads</dt>
+ * <dd>The #of threads which will be used to answer SPARQL
+ * queries (default
+ * {@value ConfigParams#DEFAULT_QUERY_THREAD_POOL_SIZE}).</dd>
+ * <dt>-forceOverflow</dt>
+ * <dd>Force a compacting merge of all shards on all data
+ * services in a bigdata federation (this option should only be
+ * used for benchmarking purposes).</dd>
+ * <dt>readLock</dt>
+ * <dd>The commit time against which the server will assert a
+ * read lock by holding open a read-only transaction against that
+ * commit point. When given, queries will default to read against
+ * this commit point. Otherwise queries will default to read
+ * against the most recent commit point on the database.
+ * Regardless, each query will be issued against a read-only
+ * transaction.</dt>
+ * </dl>
+ * </p>
+ */
// * <dt>bufferCapacity [#bytes]</dt>
// * <dd>Specify the capacity of the buffers used to decouple the
// * query evaluation from the consumption of the HTTP response by
@@ -144,7 +145,7 @@
 
 int port = 80;
 String namespace = "kb";
- int queryThreadPoolSize = 8;
+ int queryThreadPoolSize = ConfigParams.DEFAULT_QUERY_THREAD_POOL_SIZE;
 boolean forceOverflow = false;
 Long readLock = null;
 
Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java
===================================================================
--- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java	2011-04-14 13:55:29 UTC (rev 4398)
+++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java	2011-04-14 14:04:58 UTC (rev 4399)
@@ -58,8 +58,12 @@
 * Test suite for {@link RESTServlet} (SPARQL end point and REST API for RDF
 * data).
 *
+ * @todo The methods which return a mutation count should verify the returned
+ * XML document.
+ *
 * @author <a href="mailto:tho...@us...">Bryan Thompson</a>
- * @version $Id$
+ * @version $Id: TestNanoSparqlServer.java 4398 2011-04-14 13:55:29Z thompsonbry
+ * $
 */
 public class TestNanoSparqlServer extends TestCase2 {
|
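The usage notes above describe a plain command-line launch in which -nthreads is now optional. A minimal launch sketch follows, assuming the documented argument order (options first, then port, namespace, and a property file); the port value and property file name are illustrative assumptions:

public class StartNanoSparqlServer {

    public static void main(final String[] args) throws Exception {

        // Invokes the server's command-line entry point. The values below are
        // examples only; omit -nthreads to accept the new default,
        // ConfigParams.DEFAULT_QUERY_THREAD_POOL_SIZE.
        com.bigdata.rdf.sail.webapp.NanoSparqlServer.main(new String[] {
                "-nthreads", "16", // optional override of the pool size
                "8080", // port (0 would select any open port)
                "kb", // namespace of the default SPARQL endpoint
                "journal.properties" // properties file for a standalone Journal
        });
    }
}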
From: <tho...@us...> - 2011-04-15 14:02:31
|
Revision: 4402 http://bigdata.svn.sourceforge.net/bigdata/?rev=4402&view=rev Author: thompsonbry Date: 2011-04-15 14:02:24 +0000 (Fri, 15 Apr 2011) Log Message: ----------- Working on the NanoSparqlServer and its test suite. 1. Added support for the binary interchange type for a SPARQL result set. 2. Added support for ASK queries. 3. Documented the MIME Types which you can CONNEG using the REST API on the wiki. Note that while you can send RDF data using NQUADS and request result sets using JSON, the test suite does not cover those cases because the corresponding writer (NQUADS) or parser (JSON SPARQL results) has not been written yet. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-04-15 13:30:18 UTC (rev 4401) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-04-15 14:02:24 UTC (rev 4402) @@ -1,8 +1,7 @@ package com.bigdata.rdf.sail.webapp; -import info.aduna.xml.XMLWriter; - import java.io.OutputStream; +import java.nio.charset.Charset; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -26,15 +25,16 @@ import org.openrdf.query.parser.ParsedQuery; import org.openrdf.query.parser.QueryParser; import org.openrdf.query.parser.sparql.SPARQLParserFactory; +import org.openrdf.query.resultio.BooleanQueryResultFormat; +import org.openrdf.query.resultio.BooleanQueryResultWriter; +import org.openrdf.query.resultio.BooleanQueryResultWriterRegistry; import org.openrdf.query.resultio.TupleQueryResultFormat; import org.openrdf.query.resultio.TupleQueryResultWriter; import org.openrdf.query.resultio.TupleQueryResultWriterRegistry; -import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter; import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFWriter; import org.openrdf.rio.RDFWriterRegistry; -import org.openrdf.rio.rdfxml.RDFXMLWriter; import org.openrdf.sail.SailException; import com.bigdata.bop.BufferAnnotations; @@ -49,6 +49,7 @@ import com.bigdata.journal.RWStrategy; import com.bigdata.journal.TimestampUtility; import com.bigdata.rdf.sail.BigdataSail; +import com.bigdata.rdf.sail.BigdataSailBooleanQuery; import com.bigdata.rdf.sail.BigdataSailGraphQuery; import com.bigdata.rdf.sail.BigdataSailRepository; import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; @@ -254,11 +255,18 @@ protected final String mimeType; /** - * The {@link RDFFormat} for the response (required only for queries - * which produce RDF data, as opposed to RDF result sets). + * The character encoding to use with the negotiated {@link #mimeType} + * -or- <code>null</code> (it will be <code>null</code> for a binary + * encoding). */ - protected final RDFFormat format; + protected final Charset charset; + /** + * The file extension (without the leading ".") to use with the + * negotiated {@link #mimeType}. + */ + protected final String fileExt; + /** The request. 
*/
 private final HttpServletRequest req;
 
@@ -293,15 +301,19 @@
 * @param queryStr
 * The SPARQL query string.
 * @param mimeType
- * The MIME type to be used for the response.
- * @param format
- * The {@link RDFFormat} for the response (required only for
- * queries which produce RDF data, as opposed to RDF result
- * sets).
+ * The MIME type to be used for the response. The caller must
+ * verify that the MIME Type is appropriate for the query
+ * type.
+ * @param charset
+ * The character encoding to use with the negotiated MIME
+ * type (this is <code>null</code> for binary encodings).
+ * @param fileExt
+ * The file extension (without the leading ".") to use with
+ * that MIME Type.
 * @param req
 * The request.
- * @param resp
- * The response.
+ * @param os
+ * Where to write the data for the query result.
 */
 protected AbstractQueryTask(//
 final String namespace,//
 final long timestamp,//
 final String queryStr,//
 final QueryType queryType,//
 final String mimeType,//
- final RDFFormat format,//
+ final Charset charset,//
+ final String fileExt,//
 final HttpServletRequest req,//
 final OutputStream os//
- ) {
+ ) {
+
+ if (namespace == null)
+ throw new IllegalArgumentException();
+ if (queryStr == null)
+ throw new IllegalArgumentException();
+ if (queryType == null)
+ throw new IllegalArgumentException();
+ if (mimeType == null)
+ throw new IllegalArgumentException();
+// if (charset == null) // Note: null for binary encodings.
+// throw new IllegalArgumentException();
+ if (fileExt == null)
+ throw new IllegalArgumentException();
+ if (req == null)
+ throw new IllegalArgumentException();
+ if (os == null)
+ throw new IllegalArgumentException();
+
 this.namespace = namespace;
 this.timestamp = timestamp;
 this.queryStr = queryStr;
 this.queryType = queryType;
 this.mimeType = mimeType;
- this.format = format;
+ this.charset = charset;
+ this.fileExt = fileExt;
 this.req = req;
 this.os = os;
 this.queryId = Long.valueOf(m_queryIdFactory.incrementAndGet());
@@ -390,6 +421,43 @@
 
 } // class AbstractQueryTask
 
+ /**
+ * Executes an ASK query.
+ */
+ private class AskQueryTask extends AbstractQueryTask {
+
+ public AskQueryTask(final String namespace, final long timestamp,
+ final String queryStr, final QueryType queryType,
+ final BooleanQueryResultFormat format,
+ final HttpServletRequest req, final OutputStream os) {
+
+ super(namespace, timestamp, queryStr, queryType, format
+ .getDefaultMIMEType(), format.getCharset(), format
+ .getDefaultFileExtension(), req, os);
+
+ }
+
+ protected void doQuery(final BigdataSailRepositoryConnection cxn,
+ final OutputStream os) throws Exception {
+
+ final BigdataSailBooleanQuery query = cxn.prepareBooleanQuery(
+ QueryLanguage.SPARQL, queryStr, baseURI);
+
+ // Note: getQueryTask() verifies that format will be non-null.
+ final BooleanQueryResultFormat format = BooleanQueryResultWriterRegistry
+ .getInstance().getFileFormatForMIMEType(mimeType);
+
+ final BooleanQueryResultWriter w = BooleanQueryResultWriterRegistry
+ .getInstance().get(format).getWriter(os);
+
+ final boolean result = query.evaluate();
+
+ w.write(result);
+
+ }
+
+ }
 
 /**
 * Executes a tuple query.
*/
@@ -397,12 +465,13 @@
 
 public TupleQueryTask(final String namespace, final long timestamp,
 final String queryStr, final QueryType queryType,
- final String mimeType, final RDFFormat format,
+ final TupleQueryResultFormat format,
 final HttpServletRequest req, final OutputStream os) {
 
- super(namespace, timestamp, queryStr, queryType, mimeType, format,
- req, os);
+ super(namespace, timestamp, queryStr, queryType, format
+ .getDefaultMIMEType(), format.getCharset(), format
+ .getDefaultFileExtension(), req, os);
 
 }
 
@@ -411,22 +480,13 @@
 
 final BigdataSailTupleQuery query = cxn.prepareTupleQuery(
 QueryLanguage.SPARQL, queryStr, baseURI);
-
- /*
- * FIXME Raise this into the query CONNEG logic parallel to how
- * we handle queries which result in RDF data rather than SPARQL
- * result sets.
- */
+
+ // Note: getQueryTask() verifies that format will be non-null.
 final TupleQueryResultFormat format = TupleQueryResultWriterRegistry
 .getInstance().getFileFormatForMIMEType(mimeType);
 
- final TupleQueryResultWriter w = format == null ? new SPARQLResultsXMLWriter(
- new XMLWriter(os))
- : TupleQueryResultWriterRegistry.getInstance().get(format)
- .getWriter(os);
-
-// final RDFWriter w = format == null ? new RDFXMLWriter(os)
-// : RDFWriterRegistry.getInstance().get(format).getWriter(os);
+ final TupleQueryResultWriter w = TupleQueryResultWriterRegistry
+ .getInstance().get(format).getWriter(os);
 
 query.evaluate(w);
 
@@ -441,12 +501,13 @@
 
 public GraphQueryTask(final String namespace, final long timestamp,
 final String queryStr, final QueryType queryType,
- final String mimeType, final RDFFormat format,
+ final RDFFormat format,
 final HttpServletRequest req, final OutputStream os) {
 
- super(namespace, timestamp, queryStr, queryType, mimeType, format,
- req, os);
+ super(namespace, timestamp, queryStr, queryType, format
+ .getDefaultMIMEType(), format.getCharset(), format
+ .getDefaultFileExtension(), req, os);
 
 }
 
@@ -479,9 +540,13 @@
 // if(true)
 // throw new RuntimeException();
 
- final RDFWriter w = format == null ? new RDFXMLWriter(os)
- : RDFWriterRegistry.getInstance().get(format).getWriter(os);
+ // Note: getQueryTask() verifies that format will be non-null.
+ final RDFFormat format = RDFWriterRegistry.getInstance()
+ .getFileFormatForMIMEType(mimeType);
 
+ final RDFWriter w = RDFWriterRegistry.getInstance().get(format)
+ .getWriter(os);
+
 query.evaluate(w);
 
 }
@@ -531,8 +596,12 @@
 final QueryType queryType = QueryType.fromQuery(queryStr);
 
 /*
- * CONNEG for the mime type.
+ * CONNEG for the MIME type.
 *
+ * Note: An attempt to CONNEG for a MIME type which can not be used with
+ * a given type of query will result in a response using a default MIME
+ * Type for that query.
+ *
 * TODO This is a hack which will obey an Accept header IF the header
 * contains a single well-formed MIME Type. Complex accept headers will
 * not be matched and quality parameters (q=...) are ignored. (Sesame
 */
 final String acceptStr = req.getHeader("Accept");
 
- RDFFormat format = acceptStr == null ? null : RDFFormat
- .forMIMEType(acceptStr);
-
- final String mimeType;
 
 switch (queryType) {
 case ASK: {
- /*
- * FIXME handle ASK.
- */
- break;
+
+ final BooleanQueryResultFormat format = acceptStr == null ?
null + : BooleanQueryResultFormat.forMIMEType(acceptStr, + BooleanQueryResultFormat.SPARQL); + + return new AskQueryTask(namespace, timestamp, queryStr, queryType, + format, req, os); + } case DESCRIBE: case CONSTRUCT: { - if (format != null) { + final RDFFormat format = RDFFormat.forMIMEType(acceptStr, + RDFFormat.RDFXML); - mimeType = format.getDefaultMIMEType(); + return new GraphQueryTask(namespace, timestamp, queryStr, + queryType, format, req, os); - } else { - - mimeType = BigdataRDFServlet.MIME_RDF_XML; - - } - - return new GraphQueryTask(namespace, timestamp, queryStr, - queryType, mimeType, format, req, os); } case SELECT: { - if (format != null) { + final TupleQueryResultFormat format = TupleQueryResultFormat + .forMIMEType(acceptStr, TupleQueryResultFormat.SPARQL); - mimeType = format.getDefaultMIMEType(); - - } else { - - mimeType = BigdataRDFServlet.MIME_SPARQL_RESULTS_XML; - - } - return new TupleQueryTask(namespace, timestamp, queryStr, - queryType, mimeType, format, req, os); + queryType, format, req, os); } } // switch(queryType) Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-04-15 13:30:18 UTC (rev 4401) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-04-15 14:02:24 UTC (rev 4402) @@ -47,6 +47,30 @@ } /** + * Return <code>true</code> if the <code>Content-disposition</code> header + * should be set to indicate that the response body should be handled as an + * attachment rather than presented inline. This is just a hint to the user + * agent. How the user agent handles this hint is up to it. + * + * @param mimeType + * The mime type. + * + * @return <code>true</code> if it should be handled as an attachment. + */ + private boolean isAttachment(final String mimeType) { + if(mimeType.equals(MIME_TEXT_PLAIN)) { + return false; + } else if(mimeType.equals(MIME_SPARQL_RESULTS_XML)) { + return false; + } else if(mimeType.equals(MIME_SPARQL_RESULTS_JSON)) { + return false; + } else if(mimeType.equals(MIME_APPLICATION_XML)) { + return false; + } + return true; + } + + /** * Run a SPARQL query. * * FIXME Does not handle default-graph-uri or named-graph-uri query @@ -95,53 +119,25 @@ resp.setStatus(HTTP_OK); - // Figure out the filename extension for the response. - - final String ext; - final String charset; - - if(queryTask.format != null) { + resp.setContentType(queryTask.mimeType); - /* - * If some RDFormat was negotiated, then construct the filename - * for the attachment using the default extension for that - * format and the queryId. - */ - - ext = queryTask.format.getDefaultFileExtension(); - - charset = queryTask.format.getCharset().name(); + if (queryTask.charset != null) { - } else { - - if(queryTask.mimeType.equals(MIME_SPARQL_RESULTS_XML)) { - - // See http://www.w3.org/TR/rdf-sparql-XMLres/ - - ext = "srx"; // Sparql Result Set. - - } else if(queryTask.mimeType.equals(MIME_SPARQL_RESULTS_JSON)) { - - // See http://www.w3.org/TR/rdf-sparql-json-res/ - - ext = "srj"; - - } else { - - ext = "xxx"; - - } - - charset = QueryServlet.charset; + // Note: Binary encodings do not specify charset. 
+ resp.setCharacterEncoding(queryTask.charset.name()); } - - resp.setContentType(queryTask.mimeType); - - resp.setCharacterEncoding(charset); - resp.setHeader("Content-disposition", "attachment; filename=query" - + queryTask.queryId + "." + ext); + if (isAttachment(queryTask.mimeType)) { + /* + * Mark this as an attachment (rather than inline). This is just + * a hint to the user agent. How the user agent handles this + * hint is up to it. + */ + resp.setHeader("Content-disposition", + "attachment; filename=query" + queryTask.queryId + "." + + queryTask.fileExt); + } if(TimestampUtility.isCommitTime(queryTask.timestamp)) { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-04-15 13:30:18 UTC (rev 4401) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-04-15 14:02:24 UTC (rev 4402) @@ -33,8 +33,14 @@ import org.openrdf.model.vocabulary.RDFS; import org.openrdf.query.BindingSet; import org.openrdf.query.TupleQueryResultHandlerBase; +import org.openrdf.query.resultio.BooleanQueryResultFormat; +import org.openrdf.query.resultio.BooleanQueryResultParser; +import org.openrdf.query.resultio.BooleanQueryResultParserFactory; +import org.openrdf.query.resultio.BooleanQueryResultParserRegistry; +import org.openrdf.query.resultio.TupleQueryResultFormat; import org.openrdf.query.resultio.TupleQueryResultParser; -import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLParserFactory; +import org.openrdf.query.resultio.TupleQueryResultParserFactory; +import org.openrdf.query.resultio.TupleQueryResultParserRegistry; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParser; @@ -65,15 +71,13 @@ * * @todo Test default-graph-uri(s) and named-graph-uri(s). * - * @todo Verify conneg for various mime type for different kinds of queries. - * E.g., conneg for json result sets for SELECT, conneg for n3 response - * for CONSTRUCT, etc. The logic for handling Accept headers does not pay - * attention to q=... parameters, so only a single mime type should be - * specified in the Accept header. - * - * @todo NQUADS RDFWriter needs to be written. Then we can test NQUADS + * @todo An NQUADS RDFWriter needs to be written. Then we can test NQUADS * interchange. * + * @todo A SPARQL result sets JSON parser needs to be written (Sesame bundles a + * writer, but not a parser) before we can test queries which CONNEG for a + * JSON result set. + * * @todo Add tests for SIDS mode interchange of RDF XML. * * @todo The methods which return a mutation count should verify the returned @@ -247,7 +251,7 @@ public String defaultGraphUri = null; /** The accept header. 
*/ public String acceptHeader = // - BigdataRDFServlet.MIME_SPARQL_RESULTS_JSON + ";q=1" + // + BigdataRDFServlet.MIME_SPARQL_RESULTS_XML + ";q=1" + // "," + // RDFFormat.RDFXML.getDefaultMIMEType() + ";q=1"// ; @@ -356,10 +360,6 @@ */ protected Graph buildGraph(final HttpURLConnection conn) throws Exception { -// System.err.println(getResponseBody(conn)); - - final Graph g = new GraphImpl(); - try { final String baseURI = ""; @@ -380,6 +380,8 @@ fail("RDFParserFactory not found: Content-Type=" + contentType + ", format=" + format); + final Graph g = new GraphImpl(); + final RDFParser rdfParser = factory.getParser(); rdfParser.setValueFactory(new ValueFactoryImpl()); @@ -405,6 +407,52 @@ } + /** + * Parse a SPARQL result set for an ASK query. + * + * @param conn + * The connection from which to read the results. + * + * @return <code>true</code> or <code>false</code> depending on what was + * encoded in the SPARQL result set. + * + * @throws Exception + * If anything goes wrong, including if the result set does not + * encode a single boolean value. + */ + protected boolean askResults(final HttpURLConnection conn) throws Exception { + + try { + + final String contentType = conn.getContentType(); + + final BooleanQueryResultFormat format = BooleanQueryResultFormat + .forMIMEType(contentType); + + if (format == null) + fail("No format for Content-Type: " + contentType); + + final BooleanQueryResultParserFactory factory = BooleanQueryResultParserRegistry + .getInstance().get(format); + + if (factory == null) + fail("No factory for Content-Type: " + contentType); + + final BooleanQueryResultParser parser = factory.getParser(); + + final boolean result = parser.parse(conn.getInputStream()); + + return result; + + } finally { + + // terminate the http connection. + conn.disconnect(); + + } + + } + /** * Counts the #of results in a SPARQL result set. * @@ -418,12 +466,26 @@ */ protected long countResults(final HttpURLConnection conn) throws Exception { - final AtomicLong nsolutions = new AtomicLong(); + try { - try { + final String contentType = conn.getContentType(); - final TupleQueryResultParser parser = new SPARQLResultsXMLParserFactory().getParser(); + final TupleQueryResultFormat format = TupleQueryResultFormat + .forMIMEType(contentType); + if (format == null) + fail("No format for Content-Type: " + contentType); + + final TupleQueryResultParserFactory factory = TupleQueryResultParserRegistry + .getInstance().get(format); + + if (factory == null) + fail("No factory for Content-Type: " + contentType); + + final TupleQueryResultParser parser = factory.getParser(); + + final AtomicLong nsolutions = new AtomicLong(); + parser.setTupleQueryResultHandler(new TupleQueryResultHandlerBase() { // Indicates the end of a sequence of solutions. public void endQueryResult() { @@ -460,7 +522,7 @@ } /** - * Select everything in the kb using a GET. + * Issue a "status" request against the service. */ public void test_STATUS() throws Exception { @@ -509,9 +571,50 @@ } - /** - * Select everything in the kb using a GET. - */ + /** + * "ASK" query using GET with an empty KB. 
+ */ + public void test_GET_ASK() throws Exception { + + final String queryStr = "ASK where {?s ?p ?o}"; + + final QueryOptions opts = new QueryOptions(); + opts.serviceURL = m_serviceURL; + opts.queryStr = queryStr; + opts.method = "GET"; + + opts.acceptHeader = BooleanQueryResultFormat.SPARQL.getDefaultMIMEType(); + assertEquals(false, askResults(doSparqlQuery(opts, requestPath))); + + opts.acceptHeader = BooleanQueryResultFormat.TEXT.getDefaultMIMEType(); + assertEquals(false, askResults(doSparqlQuery(opts, requestPath))); + + } + + /** + * "ASK" query using POST with an empty KB. + */ + public void test_POST_ASK() throws Exception { + + final String queryStr = "ASK where {?s ?p ?o}"; + + final QueryOptions opts = new QueryOptions(); + opts.serviceURL = m_serviceURL; + opts.queryStr = queryStr; + opts.method = "POST"; + + opts.acceptHeader = BooleanQueryResultFormat.SPARQL.getDefaultMIMEType(); + assertEquals(false, askResults(doSparqlQuery(opts, requestPath))); + + opts.acceptHeader = BooleanQueryResultFormat.TEXT.getDefaultMIMEType(); + assertEquals(false, askResults(doSparqlQuery(opts, requestPath))); + + } + + /** + * Select everything in the kb using a GET. There will be no solutions + * (assuming that we are using a told triple kb or quads kb w/o axioms). + */ public void test_GET_SELECT_ALL() throws Exception { final String queryStr = "select * where {?s ?p ?o}"; @@ -521,17 +624,21 @@ opts.queryStr = queryStr; opts.method = "GET"; - // No solutions (assuming a told triple kb or quads kb w/o axioms). + opts.acceptHeader = TupleQueryResultFormat.SPARQL.getDefaultMIMEType(); assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); - // Now with json. - opts.acceptHeader = BigdataRDFServlet.MIME_SPARQL_RESULTS_JSON; - assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); + // TODO JSON parser is not bundled by openrdf. +// opts.acceptHeader = TupleQueryResultFormat.JSON.getDefaultMIMEType(); +// assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); + opts.acceptHeader = TupleQueryResultFormat.BINARY.getDefaultMIMEType(); + assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); + } /** - * Select everything in the kb using a POST. + * Select everything in the kb using a POST. There will be no solutions + * (assuming that we are using a told triple kb or quads kb w/o axioms). */ public void test_POST_SELECT_ALL() throws Exception { @@ -542,11 +649,14 @@ opts.queryStr = queryStr; opts.method = "POST"; - // No solutions (assuming a told triple kb or quads kb w/o axioms). + opts.acceptHeader = TupleQueryResultFormat.SPARQL.getDefaultMIMEType(); assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); - // Now with json. - opts.acceptHeader = BigdataRDFServlet.MIME_SPARQL_RESULTS_JSON; + // TODO JSON parser is not bundled by openrdf. +// opts.acceptHeader = TupleQueryResultFormat.JSON.getDefaultMIMEType(); +// assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); + + opts.acceptHeader = TupleQueryResultFormat.BINARY.getDefaultMIMEType(); assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); } @@ -1198,11 +1308,4 @@ } - // FIXME test ASK. - public void test_ASK() throws Exception { - - fail("Write unit test for ASK"); - - } - } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
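[Editor's note: the CONNEG behavior introduced by r4402 is easiest to see from the client side. The sketch below is not part of the commit; it issues an ASK query using only java.net. The service URL is a placeholder, and the Accept value shown is the default MIME type of BooleanQueryResultFormat.SPARQL.]

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class AskConnegClient {

    public static void main(final String[] args) throws Exception {

        // Placeholder end point: substitute your NanoSparqlServer URL.
        final String serviceURL = "http://localhost:8080/sparql";

        final String queryStr = "ASK WHERE { ?s ?p ?o }";

        final URL url = new URL(serviceURL + "?query="
                + URLEncoder.encode(queryStr, "UTF-8"));

        final HttpURLConnection conn = (HttpURLConnection) url.openConnection();

        // CONNEG: request a SPARQL boolean result set. Per r4402, an Accept
        // header which can not be used with the given query type results in
        // a default MIME type for that query type rather than an error.
        conn.setRequestProperty("Accept", "application/sparql-results+xml");

        conn.connect();
        try {

            // The negotiated MIME type (charset omitted for binary formats).
            System.out.println("Content-Type: " + conn.getContentType());

            final BufferedReader r = new BufferedReader(new InputStreamReader(
                    conn.getInputStream(), "UTF-8"));

            String line;
            while ((line = r.readLine()) != null)
                System.out.println(line);

        } finally {

            // Terminate the http connection.
            conn.disconnect();

        }

    }

}

[Against an empty KB this prints a SPARQL boolean result document encoding "false", which is what test_GET_ASK above asserts.]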
From: <tho...@us...> - 2011-04-15 17:00:30
|
Revision: 4405 http://bigdata.svn.sourceforge.net/bigdata/?rev=4405&view=rev Author: thompsonbry Date: 2011-04-15 17:00:24 +0000 (Fri, 15 Apr 2011) Log Message: ----------- Added support for the 'default-graph-uri' and 'named-graph-uri' protocol parameters for SPARQL queries. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-04-15 16:26:21 UTC (rev 4404) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-04-15 17:00:24 UTC (rev 4405) @@ -20,8 +20,12 @@ import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; +import org.openrdf.model.impl.URIImpl; +import org.openrdf.query.Dataset; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryLanguage; +import org.openrdf.query.impl.AbstractQuery; +import org.openrdf.query.impl.DatasetImpl; import org.openrdf.query.parser.ParsedQuery; import org.openrdf.query.parser.QueryParser; import org.openrdf.query.parser.sparql.SPARQLParserFactory; @@ -268,15 +272,17 @@ protected final String fileExt; /** The request. */ - private final HttpServletRequest req; + protected final HttpServletRequest req; /** Where to write the response. */ - private final OutputStream os; + protected final OutputStream os; /** * Sesame has an option for a base URI during query evaluation. This * provides a symbolic place holder for that URI in case we ever provide * a hook to set it. + * + * FIXME This should be the service end point URI. */ protected final String baseURI = null; @@ -359,6 +365,42 @@ } /** + * If the {@link HttpServletRequest} included one or more + * <code>default-graph-uri</code>s and/or or a + * <code>named-graph-uri</code>s then the {@link Dataset} for the query + * is replaced by the {@link Dataset} constructed from those protocol + * parameters. + * + * @param query + * The query. + */ + protected void overrideDataset(final AbstractQuery query) { + + final String[] defaultGraphURIs = req + .getParameterValues("default-graph-uri"); + + final String[] namedGraphURIs = req + .getParameterValues("named-graph-uri"); + + if (defaultGraphURIs != null || namedGraphURIs != null) { + + final DatasetImpl dataset = new DatasetImpl(); + + if (defaultGraphURIs != null) + for (String graphURI : defaultGraphURIs) + dataset.addDefaultGraph(new URIImpl(graphURI)); + + if (namedGraphURIs != null) + for (String graphURI : namedGraphURIs) + dataset.addNamedGraph(new URIImpl(graphURI)); + + query.setDataset(dataset); + + } + + } + + /** * Execute the query. * * @param cxn @@ -443,6 +485,9 @@ final BigdataSailBooleanQuery query = cxn.prepareBooleanQuery( QueryLanguage.SPARQL, queryStr, baseURI); + // Override query if data set protocol parameters were used. + overrideDataset(query); + // Note: getQueryTask() verifies that format will be non-null. 
final BooleanQueryResultFormat format = BooleanQueryResultWriterRegistry .getInstance().getFileFormatForMIMEType(mimeType); @@ -481,6 +526,9 @@ final BigdataSailTupleQuery query = cxn.prepareTupleQuery( QueryLanguage.SPARQL, queryStr, baseURI); + // Override query if data set protocol parameters were used. + overrideDataset(query); + // Note: getQueryTask() verifies that format will be non-null. final TupleQueryResultFormat format = TupleQueryResultWriterRegistry .getInstance().getFileFormatForMIMEType(mimeType); @@ -518,6 +566,9 @@ final BigdataSailGraphQuery query = cxn.prepareGraphQuery( QueryLanguage.SPARQL, queryStr, baseURI); + // Override query if data set protocol parameters were used. + overrideDataset(query); + /* * FIXME An error thrown here (such as if format is null and we do * not check it) will cause the response to hang, at least for the Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-04-15 16:26:21 UTC (rev 4404) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-04-15 17:00:24 UTC (rev 4405) @@ -74,8 +74,16 @@ * Test suite for {@link RESTServlet} (SPARQL end point and REST API for RDF * data). * - * @todo Test default-graph-uri(s) and named-graph-uri(s). + * @todo Test default-graph-uri(s) and named-graph-uri(s). [To test this, it + * might help to refactor into unit tests for QUERY, INSERT, DELETE, and + * UPDATE and unit tests for TRIPLES (w/ and w/o inferences), SIDS, and + * QUADS] * + * @todo How is the REST API supposed to handle INSERT w/ body and DELETE w/ + * body against a quad store? + * + * @todo Security model? + * * @todo An NQUADS RDFWriter needs to be written. Then we can test NQUADS * interchange. * @@ -120,15 +128,6 @@ new LocalTripleStore(m_jnl, namespace, ITx.UNISOLATED, properties) .create(); - // /* - // * Service will not hold a read lock. - // * - // * Queries will read from the last commit point by default and will - // use - // * a read-only tx to have snapshot isolation for that query. - // */ - // config.timestamp = ITx.READ_COMMITTED; - final Map<String, String> initParams = new LinkedHashMap<String, String>(); { @@ -248,18 +247,22 @@ */ private static class QueryOptions { - /** The URL of the SPARQL endpoint. */ + /** The URL of the SPARQL end point. */ public String serviceURL = null; + /** The HTTP method (GET, POST, etc). */ public String method = "GET"; - /** + + /** * The SPARQL query (this is a short hand for setting the * <code>query</code> URL query parameter). */ public String queryStr = null; + /** Request parameters to be formatted as URL query parameters. */ public Map<String,String[]> requestParams; - /** The accept header. */ + + /** The accept header. */ public String acceptHeader = // BigdataRDFServlet.MIME_SPARQL_RESULTS_XML + ";q=1" + // "," + // @@ -574,7 +577,8 @@ * Class representing the result of a mutation operation against the REST * API. * - * TODO Refactor into the non-test code base? + * TODO Refactor into the non-test code base along with the XML generation + * and XML parsing? 
*/ private static class MutationResult { @@ -651,10 +655,9 @@ */ public void test_STATUS() throws Exception { - final HttpURLConnection conn = doConnect(m_serviceURL + "/status", "GET"); + final HttpURLConnection conn = doConnect(m_serviceURL + "/status", + "GET"); - // No solutions (assuming a told triple kb or quads kb w/o axioms). - // connect. conn.connect(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
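[Editor's note: the Dataset construction performed by overrideDataset() in r4405 can be exercised stand-alone. The sketch below is not part of the commit; it mirrors that logic using the Sesame DatasetImpl and URIImpl classes, and the graph URIs are made up for illustration.]

import org.openrdf.model.impl.URIImpl;
import org.openrdf.query.impl.DatasetImpl;

public class DatasetParamSketch {

    /**
     * Mirrors overrideDataset() above: build a Dataset from the protocol
     * parameters. Either argument may be null when the request did not
     * supply the corresponding parameter.
     */
    static DatasetImpl toDataset(final String[] defaultGraphURIs,
            final String[] namedGraphURIs) {

        final DatasetImpl dataset = new DatasetImpl();

        if (defaultGraphURIs != null)
            for (String graphURI : defaultGraphURIs)
                dataset.addDefaultGraph(new URIImpl(graphURI));

        if (namedGraphURIs != null)
            for (String graphURI : namedGraphURIs)
                dataset.addNamedGraph(new URIImpl(graphURI));

        return dataset;

    }

    public static void main(final String[] args) {

        // E.g., for a request such as:
        //   GET /sparql?query=...&default-graph-uri=http://example.org/g1
        //       &named-graph-uri=http://example.org/g2
        final DatasetImpl dataset = toDataset(//
                new String[] { "http://example.org/g1" }, //
                new String[] { "http://example.org/g2" });

        System.out.println("defaultGraphs=" + dataset.getDefaultGraphs());
        System.out.println("namedGraphs=" + dataset.getNamedGraphs());

    }

}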
From: <mrp...@us...> - 2011-06-09 20:05:33
|
Revision: 4676 http://bigdata.svn.sourceforge.net/bigdata/?rev=4676&view=rev Author: mrpersonick Date: 2011-06-09 20:05:26 +0000 (Thu, 09 Jun 2011) Log Message: ----------- working through unit test failures Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-06-09 19:36:15 UTC (rev 4675) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-06-09 20:05:26 UTC (rev 4676) @@ -629,8 +629,8 @@ if (log.isInfoEnabled()) { // just for now while i'm debugging - log.info("rule=" + rule + ":::query=\n" - + BOpUtility.toString(left)); + log.info("rule: " + rule); + log.info("query:\n" + BOpUtility.toString(left)); } return left; Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-06-09 19:36:15 UTC (rev 4675) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-06-09 20:05:26 UTC (rev 4676) @@ -465,10 +465,13 @@ final LinkedList<NV> anns = new LinkedList<NV>(); anns.add(new NV(BOp.Annotations.BOP_ID, thisId)); anns.add(new NV(Union.Annotations.SUBQUERIES,args)); -// anns.add(new NV(Union.Annotations.EVALUATION_CONTEXT, -// BOpEvaluationContext.CONTROLLER)); -// anns.add(new NV(Union.Annotations.CONTROLLER, true)); +// if (union.getParent() == null) { + anns.add(new NV(Union.Annotations.EVALUATION_CONTEXT, + BOpEvaluationContext.CONTROLLER)); + anns.add(new NV(Union.Annotations.CONTROLLER, true)); +// } + final Union thisOp = new Union(new BOp[]{}, NV .asMap(anns.toArray(new NV[anns.size()]))); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java 2011-06-09 19:36:15 UTC (rev 4675) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestSearchQuery.java 2011-06-09 20:05:26 UTC (rev 4676) @@ -27,11 +27,13 @@ package com.bigdata.rdf.sail; import info.aduna.iteration.CloseableIteration; +import info.aduna.iteration.Iteration; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.HashSet; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; @@ -301,7 +303,7 @@ // Note: Whether or not this solution is present depends on the // default value for minCosine. 
-// expected.add(new LiteralImpl("Yellow Rose")); + expected.add(new LiteralImpl("Yellow Rose")); expected.add(new LiteralImpl("Old Yellow House")); @@ -1372,6 +1374,175 @@ } + private final void doQuery() throws Exception { + + final BigdataSail sail = getSail(); + + try { + + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + final BigdataSailRepositoryConnection cxn = + (BigdataSailRepositoryConnection) repo.getConnection(); + + try { + + cxn.setAutoCommit(false); + + final String freeTextSearch = "how now brown cow"; + + final String snippetVar = "target"; + + final String queryTemplate = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix bds: <"+BD.SEARCH_NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "select ?target ?o ?type ?score " + + "where " + + "{ " + + " ?o bds:search \""+freeTextSearch+"\" . " + + " ?o bds:minRelevance \"0.0\" . " + + " ?o bds:relevance ?score . " + + " ?o bds:minRank \"MINRANK\" . " + + " ?o bds:maxRank \"MAXRANK\" . " + + " ?target ?p ?o . " + + " ?target rdf:type ?type . " + + "}"; + + final ITextIndexer search = + sail.getDatabase().getLexiconRelation().getSearchEngine(); + + final int count = search.count( + freeTextSearch, + null, // languageCode + true, // prefixMatch + 0.0d, // minCosine + 1.0d, // maxCosine + 0, // minRank + Integer.MAX_VALUE, // maxRank + false, // matchAllTerms + BD.DEFAULT_TIMEOUT, // timeout + TimeUnit.MILLISECONDS // unit + ); + + final Collection<BindingSet> results = new LinkedList<BindingSet>(); + if (count < 1000) { + + // just go ahead and process the full query + + final TupleQuery tupleQuery = cxn.prepareTupleQuery( + QueryLanguage.SPARQL, + queryTemplate + .replace("MINRANK", "0") + .replace("MAXRANK", String.valueOf(Integer.MAX_VALUE))); + + tupleQuery.setIncludeInferred(true /* includeInferred */); + + final TupleQueryResult tqr = tupleQuery.evaluate(); + + while (tqr.hasNext()) { + results.add(tqr.next()); + } + + } else { + + final int numSnippets = 10; + + results.addAll(processRankChunks( + cxn, queryTemplate, count, numSnippets)); + + } + + // do something with the results + for (BindingSet bs : results) { + System.err.println(bs); + } + + } finally { + cxn.close(); + } + } finally { + sail.__tearDownUnitTest(); + } + + } + + /** + * Process a query in min/max rank chunks, with a goal of reaching the + * number of snippets specified by numSnippets. + * + * @param cxn + * The sail connection. + * @param queryTemplate + * The query template. Uses "target" as the variable for snippets, + * and the strings "MINRANK" and "MAXRANK" as the placeholders + * for the rank chunk bounds. + * @param numFreeTextHits + * The number of free text search hits this query produces if + * run without min/max rank. + * @param numSnippets + * The target number of snippets. Might produce less if all free + * text search hits are processed. Might produce more + * if the rank chunk is big enough to produce excess snippets + * (the entire rank chunk is always processed). 
+ */ + private Collection<BindingSet> processRankChunks( + final RepositoryConnection cxn, + final String queryTemplate, + final int numFreeTextHits, + final int numSnippets) throws Exception { + + final Collection<BindingSet> result = new LinkedList<BindingSet>(); + + // keep track of the # of snippets + final Set<IV> snippets = new LinkedHashSet<IV>(); + + // the size of the rank chunks + final int chunkSize = 1000; + + int minRank = 1; + int maxRank = chunkSize; + + // keep doing chunks while we haven't reached our snippet goal and + // we haven't run out of free text search results. + while (snippets.size() < numSnippets && minRank < numFreeTextHits) { + + final TupleQuery tupleQuery = cxn.prepareTupleQuery( + QueryLanguage.SPARQL, + queryTemplate + .replace("MINRANK", String.valueOf(minRank)) + .replace("MAXRANK", String.valueOf(maxRank))); + + tupleQuery.setIncludeInferred(true /* includeInferred */); + + final TupleQueryResult chunk = tupleQuery.evaluate(); + + while (chunk.hasNext()) { + + final BindingSet bs = chunk.next(); + + final BigdataValue val = (BigdataValue) + bs.getBinding("target").getValue(); + + final IV iv = val.getIV(); + + // LinkedHashSet<IV> will guarantee uniqueness + snippets.add(iv); + + result.add(bs); + + } + + minRank = maxRank+1; + maxRank = maxRank+chunkSize; + + } + + return result; + + } + /* prefix BIGDATA_QUERY_HINTS: <http://www.bigdata.com/queryHints#com.bigdata.rdf.sail.QueryHints.optimizer=None> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
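[Editor's note: the MINRANK/MAXRANK windows used by processRankChunks() in r4676 advance in fixed strides of chunkSize. Ignoring the snippet-count termination condition, the iteration reduces to the loop sketched below. The hit count of 3,500 is an assumed value for illustration; the chunk size of 1,000 is the one hard-coded in the commit.]

public class RankChunkWindows {

    public static void main(final String[] args) {

        final int numFreeTextHits = 3500; // assumed for the example.
        final int chunkSize = 1000; // hard-coded in processRankChunks().

        int minRank = 1;
        int maxRank = chunkSize;

        // processRankChunks() also stops once enough distinct snippets have
        // been collected; that condition is omitted here.
        while (minRank < numFreeTextHits) {

            // These values are substituted for the MINRANK and MAXRANK
            // placeholders in the query template.
            System.out.println("MINRANK=" + minRank + ", MAXRANK=" + maxRank);

            minRank = maxRank + 1;
            maxRank = maxRank + chunkSize;

        }

        // Prints the windows [1,1000], [1001,2000], [2001,3000], [3001,4000].
        // The last window overshoots the hit count, consistent with the
        // javadoc above: the entire rank chunk is always processed.

    }

}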
From: <tho...@us...> - 2011-06-17 17:09:01
|
Revision: 4726 http://bigdata.svn.sourceforge.net/bigdata/?rev=4726&view=rev Author: thompsonbry Date: 2011-06-17 17:08:54 +0000 (Fri, 17 Jun 2011) Log Message: ----------- Added the original SPARQL and parsed SPARQL operator tree in their own sections of the page. Modified to stream the explanation and status page results rather than buffering in memory. Removed the XMLBuilder and HTMLBuilder constructor variants which did not require the caller to provide the target on which to write the data. The XMLBuilder#toString() assumed that the Writer was a StringWriter. That assumption was not compatible with the use of the class when building a servlet response. See https://sourceforge.net/apps/trac/bigdata/ticket/331 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HTMLBuilder.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/XMLBuilder.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestXMLBuilder.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -264,7 +264,8 @@ protected final QueryType queryType; /** - * The negotiated MIME type to be used for the query response. + * The negotiated MIME type to be used for the query response (this + * does not include the charset encoding). */ protected final String mimeType; @@ -314,6 +315,15 @@ volatile protected UUID queryId2; /** + * The parsed query. It will be one of the {@link BigdataSailQuery} + * implementations. They all extend {@link AbstractQuery}. + * <p> + * Note: This field is made visible by the volatile write on + * {@link #queryId2}. + */ + protected AbstractQuery sailQuery; + + /** * When true, provide an "explanation" for the query (query plan, query * evaluation statistics) rather than the results of the query. */ @@ -435,24 +445,53 @@ } - /** - * Sets {@link #queryId2} to the {@link UUID} which will be associated - * with the {@link IRunningQuery}. If {@link QueryHints#QUERYID} has - * already been used by the application to specify the {@link UUID} then - * that {@link UUID} is noted. Otherwise, a random {@link UUID} is - * generated and assigned to the query by binding it on the query hints. + /** + * * <p> * Note: This is also responsible for noticing the time at which the * query begins to execute and storing the {@link RunningQuery} in the * {@link #m_queries} map. * + * @param query + */ + protected void setupQuery(final AbstractQuery query) { + + // Note the begin time for the query. + final long begin = System.nanoTime(); + + // Figure out the UUID under which the query will execute. 
+ final UUID queryId2 = setQueryId((BigdataSailQuery)query); + + // Override query if data set protocol parameters were used. + overrideDataset(query); + + // Set the query object. + this.sailQuery = query; + + // Set the IRunningQuery's UUID (volatile write!) + this.queryId2 = queryId2; + + // Stuff it in the map of running queries. + m_queries.put(queryId, new RunningQuery(queryId.longValue(), + queryId2, queryStr, begin)); + + } + + /** + * Determines the {@link UUID} which will be associated with the + * {@link IRunningQuery}. If {@link QueryHints#QUERYID} has already been + * used by the application to specify the {@link UUID} then that + * {@link UUID} is noted. Otherwise, a random {@link UUID} is generated + * and assigned to the query by binding it on the query hints. + * * @param query * The query. + * + * @return The {@link UUID} which will be associated with the + * {@link IRunningQuery}. */ - protected void setQueryId(final BigdataSailQuery query) { + protected UUID setQueryId(final BigdataSailQuery query) { assert queryId2 == null; // precondition. - // Note the begin time for the query. - final long begin = System.nanoTime(); // Figure out the effective UUID under which the query will run. final String queryIdStr = query.getQueryHints().getProperty( QueryHints.QUERYID); @@ -463,9 +502,7 @@ } else { queryId2 = UUID.fromString(queryIdStr); } - // Stuff it in the map of running queries. - m_queries.put(queryId, new RunningQuery(queryId.longValue(), - queryId2, queryStr, begin)); + return queryId2; } /** @@ -564,13 +601,9 @@ final BigdataSailBooleanQuery query = cxn.prepareBooleanQuery( QueryLanguage.SPARQL, queryStr, baseURI); - - // Figure out the UUID under which the query will execute. - setQueryId(query); + + setupQuery(query); - // Override query if data set protocol parameters were used. - overrideDataset(query); - // Note: getQueryTask() verifies that format will be non-null. final BooleanQueryResultFormat format = BooleanQueryResultWriterRegistry .getInstance().getFileFormatForMIMEType(mimeType); @@ -609,12 +642,8 @@ final BigdataSailTupleQuery query = cxn.prepareTupleQuery( QueryLanguage.SPARQL, queryStr, baseURI); - // Figure out the UUID under which the query will execute. - setQueryId(query); + setupQuery(query); - // Override query if data set protocol parameters were used. - overrideDataset(query); - // Note: getQueryTask() verifies that format will be non-null. final TupleQueryResultFormat format = TupleQueryResultWriterRegistry .getInstance().getFileFormatForMIMEType(mimeType); @@ -652,11 +681,7 @@ final BigdataSailGraphQuery query = cxn.prepareGraphQuery( QueryLanguage.SPARQL, queryStr, baseURI); - // Figure out the UUID under which the query will execute. - setQueryId(query); - - // Override query if data set protocol parameters were used. 
- overrideDataset(query); + setupQuery(query); /* * FIXME An error thrown here (such as if format is null and we do Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFServlet.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFServlet.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -32,6 +32,7 @@ import java.io.PipedInputStream; import java.io.PipedOutputStream; import java.io.PrintWriter; +import java.io.StringWriter; import javax.servlet.Servlet; import javax.servlet.http.HttpServletRequest; @@ -267,12 +268,14 @@ protected void reportModifiedCount(final HttpServletResponse resp, final long nmodified, final long elapsed) throws IOException { - final XMLBuilder t = new XMLBuilder(); + final StringWriter w = new StringWriter(); + + final XMLBuilder t = new XMLBuilder(w); t.root("data").attr("modified", nmodified) .attr("milliseconds", elapsed).close(); - buildResponse(resp, HTTP_OK, MIME_APPLICATION_XML, t.toString()); + buildResponse(resp, HTTP_OK, MIME_APPLICATION_XML, w.toString()); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HTMLBuilder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HTMLBuilder.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HTMLBuilder.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -1,6 +1,7 @@ package com.bigdata.rdf.sail.webapp; import java.io.IOException; +import java.io.Writer; /** * Variant of {@link XMLBuilder} for HTML output. 
@@ -9,12 +10,23 @@ */ public class HTMLBuilder extends XMLBuilder { - public HTMLBuilder() throws IOException { - super(false); - } + public HTMLBuilder(final Writer w) throws IOException { - public Node body() throws IOException { - return root("html").node("body"); + super(false/* isXML */, null/* encoding */, w); + + } + + public HTMLBuilder(final String encoding, final Writer w) + throws IOException { + + super(false/* isXML */, encoding, w); + + } + + public Node body() throws IOException { + + return root("html").node("body"); + } } \ No newline at end of file Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -2,7 +2,8 @@ import java.io.IOException; import java.io.OutputStream; -import java.io.StringWriter; +import java.io.OutputStreamWriter; +import java.io.Writer; import java.util.UUID; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; @@ -19,8 +20,8 @@ import com.bigdata.bop.fed.QueryEngineFactory; import com.bigdata.journal.IIndexManager; import com.bigdata.journal.TimestampUtility; -import com.bigdata.rawstore.Bytes; import com.bigdata.rdf.sail.webapp.BigdataRDFContext.AbstractQueryTask; +import com.bigdata.util.HTMLUtility; import com.bigdata.util.InnerCause; /** @@ -192,8 +193,16 @@ getBigdataRDFContext().queryService.execute(ft); if (explain) { - // Send an explanation instead of the query results. - explainQuery(queryStr, queryTask, ft, os); + final Writer w = new OutputStreamWriter(os, queryTask.charset); + try { + // Send an explanation instead of the query results. + explainQuery(queryStr, queryTask, ft, w); + } finally { + w.flush(); + w.close(); + os.flush(); + os.close(); + } } else { // Wait for the Future. ft.get(); @@ -222,11 +231,11 @@ */ private void explainQuery(final String queryStr, final AbstractQueryTask queryTask, final FutureTask<Void> ft, - final OutputStream os) throws Exception { - + final Writer w) throws Exception { + /* - * Spin until either we have the IRunningQuery or the Future of the - * query is done (in which case we won't get it). + * Spin until either we have the UUID of the IRunningQuery or the Future + * of the query is done. */ if(log.isDebugEnabled()) log.debug("Will build explanation"); @@ -240,49 +249,44 @@ // Ignore. } if (queryTask.queryId2 != null) { + // Got it. queryId2 = queryTask.queryId2; break; } } - if (queryId2 != null) { - if(log.isDebugEnabled()) - log.debug("Resolving IRunningQuery: queryId2=" + queryId2); - final IIndexManager indexManager = getBigdataRDFContext() - .getIndexManager(); - final QueryEngine queryEngine = QueryEngineFactory - .getQueryController(indexManager); - while (!ft.isDone() && q == null) { - try { - // Wait a bit for the IRunningQuery to *start*. - ft.get(1/* timeout */, TimeUnit.MILLISECONDS); - } catch(TimeoutException ex) { - // Ignore. + + if(ft.isDone()) { + /* + * If the query is done, the check for an error before we build up + * the explanation document. + */ + ft.get(); + + /* + * No error and the Future is done. The UUID of the IRunningQuery + * MUST have been assigned. If we do not have it yet, then check + * once more. If it is not set then that is an error. 
+ */ + if (queryTask.queryId2 != null) { + // Check once more. + queryId2 = queryTask.queryId2; + if (queryId2 == null) { + /* + * This should have been assigned unless the query failed + * during the setup. + */ + throw new AssertionError(); } - // Resolve the IRunningQuery. - try { - q = queryEngine.getRunningQuery(queryId2); - } catch (RuntimeException ex) { - if (InnerCause.isInnerCause(ex, InterruptedException.class)) { - // Ignore. Query terminated normally, but we don't have - // it. - } else { - // Ignore. Query has error, but we will get err from - // Future. - } - } } - if (q != null) - if(log.isDebugEnabled()) - log.debug("Resolved IRunningQuery: query=" + q); } - - // wait for the Future (will toss any exceptions). - ft.get(); + assert queryId2 != null; /* * Build the explanation. + * + * Note: The query may still be executing while we do this. */ - final HTMLBuilder doc = new HTMLBuilder(); + final HTMLBuilder doc = new HTMLBuilder(queryTask.charset.name(), w); { XMLBuilder.Node current = doc.root("html"); @@ -295,37 +299,93 @@ } current = current.node("body"); - if (q != null) { - // Format query statistics as a table. - final StringWriter w = new StringWriter( - 8 * Bytes.kilobyte32); - QueryLog.getTableXHTML(queryStr, q, w, - true/* showQueryDetails */, 64/* maxBopLength */); + current.node("h2", "SPARQL").node("p", + HTMLUtility.escapeForXHTML(queryTask.queryStr)); - // Add into the HTML document. - current.text(w.toString()); - } else { - current.node("p", - "Query ran too quickly to collect statistics."); + current.node("h2", "Parsed Query").node("pre", + HTMLUtility.escapeForXHTML(queryTask.sailQuery.toString())); + + /* + * Spin until we get the IRunningQuery reference or the query is + * done, in which case we won't get it. + */ + if (queryId2 != null) { + if(log.isDebugEnabled()) + log.debug("Resolving IRunningQuery: queryId2=" + queryId2); + final IIndexManager indexManager = getBigdataRDFContext() + .getIndexManager(); + final QueryEngine queryEngine = QueryEngineFactory + .getQueryController(indexManager); + while (!ft.isDone() && q == null) { + try { + // Wait a bit for the IRunningQuery to *start*. + ft.get(1/* timeout */, TimeUnit.MILLISECONDS); + } catch(TimeoutException ex) { + // Ignore. + } + // Resolve the IRunningQuery. + try { + q = queryEngine.getRunningQuery(queryId2); + } catch (RuntimeException ex) { + if (InnerCause.isInnerCause(ex, InterruptedException.class)) { + // Ignore. Query terminated normally, but we don't have + // it. + } else { + // Ignore. Query has error, but we will get err from + // Future. + } + } + } + if (q != null) + if(log.isDebugEnabled()) + log.debug("Resolved IRunningQuery: query=" + q); } + + // wait for the Future (will toss any exceptions). + ft.get(); + + { + current.node("h2", + "Query Evaluation Statistics").node("p"); + if (q != null) { + /* + * Format query statistics as a table. + * + * Note: This is writing on the Writer so it goes directly + * into the HTML document we are building for the client. + */ + QueryLog.getTableXHTML(queryStr, q, w, + true/* showQueryDetails */, 64/* maxBopLength */); + +// // Add into the HTML document. +// statsNode.text(w.toString()); + } else { + /* + * This can happen if we fail to get the IRunningQuery + * reference before the query terminates. E.g., if the + * query runs too quickly there is a data race and the + * reference may not be available anymore. + */ + current + .text("Not available."); + } + } doc.closeAll(current); } - /* - * Send the response. 
- * - * TODO It would be better to stream this rather than buffer it in - * RAM. That also opens up the opportunity for real-time updates for - * long-running (analytic) queries, incremental information from the - * runtime query optimizer, etc. - */ - if(log.isDebugEnabled()) - log.debug("Sending explanation."); - os.write(doc.toString().getBytes("UTF-8")); - os.flush(); - os.close(); - if(log.isDebugEnabled()) - log.debug("Sent explanation."); +// /* +// * Send the response. +// * +// * TODO It would be better to stream this rather than buffer it in +// * RAM. That also opens up the opportunity for real-time updates for +// * long-running (analytic) queries, incremental information from the +// * runtime query optimizer, etc. +// */ +// if(log.isDebugEnabled()) +// log.debug("Sending explanation."); +// os.write(doc.toString().getBytes("UTF-8")); +// if(log.isDebugEnabled()) +// log.debug("Sent explanation."); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/StatusServlet.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -1,7 +1,8 @@ package com.bigdata.rdf.sail.webapp; import java.io.IOException; -import java.io.StringWriter; +import java.io.OutputStreamWriter; +import java.io.Writer; import java.util.Comparator; import java.util.Iterator; import java.util.LinkedHashMap; @@ -17,7 +18,6 @@ import com.bigdata.bop.engine.QueryEngine; import com.bigdata.bop.engine.QueryLog; import com.bigdata.bop.fed.QueryEngineFactory; -import com.bigdata.rawstore.Bytes; import com.bigdata.rdf.sail.webapp.BigdataRDFContext.RunningQuery; import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.util.HTMLUtility; @@ -95,7 +95,11 @@ // bigdata namespaces known to the index manager. final boolean showNamespaces = req.getParameter("showNamespaces") != null; - final HTMLBuilder doc = new HTMLBuilder(); + resp.setContentType(MIME_TEXT_HTML); + final Writer w = new OutputStreamWriter(resp.getOutputStream(), "UTF-8"); + try { + + final HTMLBuilder doc = new HTMLBuilder("UTF-8", w); XMLBuilder.Node current = doc.root("html"); { @@ -240,7 +244,7 @@ final Iterator<IRunningQuery> itr = runningQueryAge.values() .iterator(); - final StringWriter w = new StringWriter(Bytes.kilobyte32 * 8); +// final StringWriter w = new StringWriter(Bytes.kilobyte32 * 8); while (itr.hasNext()) { @@ -258,26 +262,33 @@ final String queryStr = acceptedQuery == null ? "N/A" : acceptedQuery.query; - // Format as a table. + // Format as a table, writing onto the response. QueryLog.getTableXHTML(queryStr, query, w, !showQueryDetails, maxBopLength); - // Extract as String - final String s = w.getBuffer().toString(); +// // Extract as String +// final String s = w.getBuffer().toString(); +// +// // Add into the HTML document. +// current.text(s); +// +// // Clear the buffer. +// w.getBuffer().setLength(0); - // Add into the HTML document. - current.text(s); - - // Clear the buffer. - w.getBuffer().setLength(0); - } // next IRunningQuery. 
} doc.closeAll(current); + + } finally { + + w.flush(); + w.close(); + + } - buildResponse(resp, HTTP_OK, MIME_TEXT_HTML, doc.toString()); +// buildResponse(resp, HTTP_OK, MIME_TEXT_HTML, doc.toString()); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/XMLBuilder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/XMLBuilder.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/XMLBuilder.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -1,9 +1,6 @@ package com.bigdata.rdf.sail.webapp; import java.io.IOException; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.io.StringWriter; import java.io.Writer; /** @@ -23,7 +20,7 @@ * </div> * </xml> * - * XMLBuilder.Node closed = new XMLBuilder(false) + * XMLBuilder.Node closed = new XMLBuilder(false,writer) * .root("xml") * .node("div") * .attr("attr", "attr1") @@ -49,42 +46,59 @@ // private boolean m_pp = false; - public XMLBuilder() throws IOException { - this(true, (OutputStream) null); +// public XMLBuilder() throws IOException { +// +// this(true, (OutputStream) null); +// +// } +// +// public XMLBuilder(boolean xml) throws IOException { +// +// this(xml, (OutputStream) null); +// +// } +// +// public XMLBuilder(final boolean xml, final OutputStream outstr) +// throws IOException { +// +// this(xml, null/* encoding */, outstr); +// +// } +// +// public XMLBuilder(final boolean xml, final String encoding) +// throws IOException { +// +// this(xml, encoding, (OutputStream) null); +// +// } + + public XMLBuilder(final Writer w) throws IOException { + + this(true/* xml */, null/* encoding */, w/* writer */); + } - public XMLBuilder(boolean xml) throws IOException { - this(xml, (OutputStream) null); - } - - public XMLBuilder(boolean xml, OutputStream outstr) throws IOException { - this(xml,null/*encoding*/,outstr); - } - - public XMLBuilder(boolean xml, String encoding) throws IOException { + public XMLBuilder(final boolean xml, final String encoding, + final Writer w) throws IOException { - this(xml, encoding, (OutputStream) null); - - } - - public XMLBuilder(boolean xml, String encoding, OutputStream outstr) throws IOException { - + if(w == null) + throw new IllegalArgumentException(); + this.xml = xml; - if (outstr == null) { - m_writer = new StringWriter(); - } else { - m_writer = new OutputStreamWriter(outstr); - } + this.m_writer = w; if (xml) { - if(encoding!=null) { + if (encoding != null) { m_writer.write("<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>"); } else { m_writer.write("<?xml version=\"1.0\"?>"); } } else { - // TODO Note the optional encoding for use in a meta tag. + /* + * Note: The optional encoding should also be included in a meta tag + * for an HTML document. + */ m_writer.write("<!DOCTYPE HTML PUBLIC"); m_writer.write(" \"-//W3C//DTD HTML 4.01 Transitional//EN\""); m_writer.write(" \"http://www.w3.org/TR/html4/loose.dtd\">"); @@ -99,9 +113,10 @@ // private void initWriter(OutputStream outstr) { // } - public String toString() { - return m_writer.toString(); - } + // Note: This method assumed that m_writer was a StringWriter!!! 
+// public String toString() { +// return m_writer.toString(); +// } public Node root(String name) throws IOException { return new Node(name, null); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestXMLBuilder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestXMLBuilder.java 2011-06-17 17:02:38 UTC (rev 4725) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestXMLBuilder.java 2011-06-17 17:08:54 UTC (rev 4726) @@ -28,6 +28,7 @@ package com.bigdata.rdf.sail.webapp; import java.io.IOException; +import java.io.StringWriter; import junit.framework.TestCase2; @@ -61,7 +62,9 @@ */ public void testXMLBuilder() throws IOException { - final XMLBuilder xml = new XMLBuilder(); + final StringWriter w = new StringWriter(); + + final XMLBuilder xml = new XMLBuilder(w); XMLBuilder.Node close = xml.root("data") .attr("id", "TheRoot") @@ -79,7 +82,7 @@ assertTrue(close == null); if(log.isInfoEnabled()) - log.info(xml.toString()); + log.info(w.toString()); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
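[Editor's note: since r4726 removes XMLBuilder#toString(), the caller owns the Writer and reads the serialized document from it. A minimal usage sketch, patterned on reportModifiedCount() and the updated TestXMLBuilder above:]

import java.io.StringWriter;

import com.bigdata.rdf.sail.webapp.XMLBuilder;

public class XMLBuilderUsage {

    public static void main(final String[] args) throws Exception {

        // The caller supplies the Writer; the builder streams onto it.
        final StringWriter w = new StringWriter();

        final XMLBuilder t = new XMLBuilder(w);

        t.root("data").attr("modified", 5L).attr("milliseconds", 12L).close();

        // Read the document from the Writer, not from the builder (the old
        // XMLBuilder#toString() assumed a StringWriter and was removed).
        System.out.println(w.toString());

    }

}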
From: <tho...@us...> - 2011-06-20 12:42:20
|
Revision: 4739 http://bigdata.svn.sourceforge.net/bigdata/?rev=4739&view=rev Author: thompsonbry Date: 2011-06-20 12:42:13 +0000 (Mon, 20 Jun 2011) Log Message: ----------- Factored out the query hints parser into a utility class and wrote a unit test to verify its behavior for a few simple queries. See https://sourceforge.net/apps/trac/bigdata/ticket/336 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHintsUtility.java Removed Paths: ------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-06-18 21:21:54 UTC (rev 4738) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-06-20 12:42:13 UTC (rev 4739) @@ -1,8 +1,6 @@ package com.bigdata.rdf.sail; -import java.util.Map; import java.util.Properties; -import java.util.StringTokenizer; import org.openrdf.query.MalformedQueryException; import org.openrdf.query.QueryLanguage; @@ -11,9 +9,6 @@ import org.openrdf.query.parser.ParsedQuery; import org.openrdf.query.parser.ParsedTupleQuery; import org.openrdf.query.parser.QueryParserUtil; -import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; -import org.openrdf.query.parser.sparql.ast.ParseException; -import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder; import org.openrdf.repository.RepositoryException; import org.openrdf.repository.sail.SailQuery; import org.openrdf.repository.sail.SailRepositoryConnection; @@ -26,9 +21,6 @@ import com.bigdata.rdf.sail.BigdataSail.BigdataSailConnection; import com.bigdata.rdf.sail.bench.NanoSparqlClient; import com.bigdata.rdf.sail.bench.NanoSparqlClient.QueryType; -import com.bigdata.rdf.sail.sparql.BaseDeclProcessor; -import com.bigdata.rdf.sail.sparql.PrefixDeclProcessor; -import com.bigdata.rdf.sail.sparql.StringEscapesProcessor; import com.bigdata.rdf.store.AbstractTripleStore; /** @@ -85,7 +77,8 @@ final ParsedGraphQuery parsedQuery = QueryParserUtil.parseGraphQuery( ql, qs, baseURI); - final Properties queryHints = parseQueryHints(ql, qs, baseURI); + final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, qs, + baseURI); final boolean describe = ql == QueryLanguage.SPARQL && NanoSparqlClient.QueryType.fromQuery(qs) == QueryType.DESCRIBE; @@ -110,7 +103,8 @@ final ParsedTupleQuery parsedQuery = QueryParserUtil.parseTupleQuery( ql, queryString, baseURI); - final Properties queryHints = parseQueryHints(ql, queryString, baseURI); + final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, + queryString, baseURI); return new BigdataSailTupleQuery(parsedQuery, this, queryHints); @@ -131,7 +125,8 @@ final ParsedBooleanQuery parsedQuery = QueryParserUtil .parseBooleanQuery(ql, queryString, baseURI); - final Properties queryHints = parseQueryHints(ql, 
queryString, baseURI); + final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, + queryString, baseURI); return new BigdataSailBooleanQuery(parsedQuery, this, queryHints); @@ -151,7 +146,8 @@ final ParsedQuery parsedQuery = QueryParserUtil.parseQuery(ql, qs, baseURI); - final Properties queryHints = parseQueryHints(ql, qs, baseURI); + final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, qs, + baseURI); if (parsedQuery instanceof ParsedTupleQuery) { @@ -322,60 +318,8 @@ } } - + /** - * Parse query hints from a query string. Query hints are embedded in the - * query string via special namespaces. - * See {@link QueryHints#PREFIX} for more information. - */ - private Properties parseQueryHints(final QueryLanguage ql, - final String queryString, final String baseURI) - throws MalformedQueryException { - try { - final Properties queryHints = new Properties(); - // currently only supporting SPARQL - if (ql == QueryLanguage.SPARQL) { - // the next four lines were taken directly from - // org.openrdf.query.parser.sparql.SPARQLParser.parseQuery(String queryStr, String baseURI) - final ASTQueryContainer qc = SyntaxTreeBuilder.parseQuery(queryString); - StringEscapesProcessor.process(qc); - BaseDeclProcessor.process(qc, baseURI); - final Map<String, String> prefixes = PrefixDeclProcessor - .process(qc); - // iterate the namespaces - for (Map.Entry<String, String> prefix : prefixes.entrySet()) { - // if we see one that matches the magic namespace, try - // to parse it - if (prefix.getKey().equalsIgnoreCase(QueryHints.PREFIX)) { - String hints = prefix.getValue(); - // has to have a # and it can't be at the end - int i = hints.indexOf('#'); - if (i < 0 || i == hints.length()-1) { - throw new MalformedQueryException("bad query hints: " + hints); - } - hints = hints.substring(i+1); - // properties are separated by & - final StringTokenizer st = new StringTokenizer(hints, "&"); - while (st.hasMoreTokens()) { - String hint = st.nextToken(); - i = hint.indexOf('='); - if (i < 0 || i == hint.length()-1) { - throw new MalformedQueryException("bad query hint: " + hint); - } - final String key = hint.substring(0, i); - final String val = hint.substring(i+1); - queryHints.put(key, val); - } - } - } - } - return queryHints; - } catch (ParseException e) { - throw new MalformedQueryException(e.getMessage(), e); - } - } - - /** * Set the change log on the SAIL connection. See {@link IChangeLog} and * {@link IChangeRecord}. * Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java 2011-06-20 12:42:13 UTC (rev 4739) @@ -0,0 +1,143 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* +Portions of this code are: + +Copyright Aduna (http://www.aduna-software.com/) � 2001-2007 + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.Map; +import java.util.Properties; +import java.util.StringTokenizer; + +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; +import org.openrdf.query.parser.sparql.ast.ParseException; +import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder; + +import com.bigdata.rdf.sail.sparql.BaseDeclProcessor; +import com.bigdata.rdf.sail.sparql.PrefixDeclProcessor; +import com.bigdata.rdf.sail.sparql.StringEscapesProcessor; + +/** + * A utility class for parsing {@link QueryHints}. + * + * @author <a href="mailto:mrp...@us...">Mike Personick</a> + * @version $Id$ + */ +public class QueryHintsUtility { + + /** + * Parse query hints from a query string. Query hints are embedded in the + * query string via special namespaces. + * <p> + * Note: The Sesame operator tree does not include the original query hints, + * which is why this method is not written against the operator tree. + * + * See {@link QueryHints#PREFIX} for more information. 
+ */ + public static Properties parseQueryHints(final QueryLanguage ql, + final String queryString, final String baseURI) + throws MalformedQueryException { + try { + final Properties queryHints = new Properties(); + // currently only supporting SPARQL + if (ql == QueryLanguage.SPARQL) { + // the next four lines were taken directly from + // org.openrdf.query.parser.sparql.SPARQLParser.parseQuery(String queryStr, String baseURI) + final ASTQueryContainer qc = SyntaxTreeBuilder + .parseQuery(queryString); + StringEscapesProcessor.process(qc); + BaseDeclProcessor.process(qc, baseURI); + final Map<String, String> prefixes = PrefixDeclProcessor + .process(qc); + // iterate the namespaces + for (Map.Entry<String, String> prefix : prefixes.entrySet()) { + // if we see one that matches the magic namespace, try + // to parse it + if (prefix.getKey().equalsIgnoreCase(QueryHints.PREFIX)) { + String hints = prefix.getValue(); + // has to have a # and it can't be at the end + int i = hints.indexOf('#'); + if (i < 0 || i == hints.length() - 1) { + throw new MalformedQueryException( + "bad query hints: " + hints); + } + hints = hints.substring(i + 1); + // properties are separated by & + final StringTokenizer st = new StringTokenizer(hints, + "&"); + while (st.hasMoreTokens()) { + final String hint = st.nextToken(); + i = hint.indexOf('='); + if (i < 0 || i == hint.length() - 1) { + throw new MalformedQueryException( + "bad query hint: " + hint); + } + final String key = hint.substring(0, i); + final String val = hint.substring(i+1); + queryHints.put(key, val); + } + } + } + } + return queryHints; + } catch (ParseException e) { + throw new MalformedQueryException(e.getMessage(), e); + } + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java 2011-06-18 21:21:54 UTC (rev 4738) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java 2011-06-20 12:42:13 UTC (rev 4739) @@ -80,7 +80,10 @@ } final TestSuite suite = new TestSuite("Sesame 2.x integration"); - + + // unit tests for extracting query hints from a SPARQL query. 
+ suite.addTestSuite(TestQueryHintsUtility.class); + // bootstrap tests for the BigdataSail suite.addTestSuite(TestBootstrapBigdataSail.class); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java 2011-06-18 21:21:54 UTC (rev 4738) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestDescribe.java 2011-06-20 12:42:13 UTC (rev 4739) @@ -49,12 +49,12 @@ */ public class TestDescribe extends ProxyBigdataSailTestCase { - protected static Logger log = Logger.getLogger(TestDescribe.class); + private static Logger log = Logger.getLogger(TestDescribe.class); @Override public Properties getProperties() { - Properties props = super.getProperties(); + final Properties props = super.getProperties(); props.setProperty(BigdataSail.Options.TRUTH_MAINTENANCE, "false"); props.setProperty(BigdataSail.Options.AXIOMS_CLASS, NoAxioms.class.getName()); @@ -82,6 +82,7 @@ public void testSingleDescribe() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = @@ -90,14 +91,14 @@ try { - URI mike = new URIImpl(BD.NAMESPACE+"Mike"); - URI bryan = new URIImpl(BD.NAMESPACE+"Bryan"); - URI person = new URIImpl(BD.NAMESPACE+"Person"); - URI likes = new URIImpl(BD.NAMESPACE+"likes"); - URI rdf = new URIImpl(BD.NAMESPACE+"RDF"); - URI rdfs = new URIImpl(BD.NAMESPACE+"RDFS"); - Literal label1 = new LiteralImpl("Mike"); - Literal label2 = new LiteralImpl("Bryan"); + final URI mike = new URIImpl(BD.NAMESPACE+"Mike"); + final URI bryan = new URIImpl(BD.NAMESPACE+"Bryan"); + final URI person = new URIImpl(BD.NAMESPACE+"Person"); + final URI likes = new URIImpl(BD.NAMESPACE+"likes"); + final URI rdf = new URIImpl(BD.NAMESPACE+"RDF"); + final URI rdfs = new URIImpl(BD.NAMESPACE+"RDFS"); + final Literal label1 = new LiteralImpl("Mike"); + final Literal label2 = new LiteralImpl("Bryan"); /**/ cxn.add(mike, RDF.TYPE, person); cxn.add(mike, likes, rdf); @@ -121,7 +122,7 @@ { - String query = + final String query = "prefix bd: <"+BD.NAMESPACE+"> " + "prefix rdf: <"+RDF.NAMESPACE+"> " + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + @@ -171,20 +172,22 @@ */ final BigdataSailGraphQuery graphQuery = (BigdataSailGraphQuery) cxn.prepareGraphQuery(QueryLanguage.SPARQL, query); - GraphQueryResult result = graphQuery.evaluate(); + final GraphQueryResult result = graphQuery.evaluate(); final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - log.info(tupleExpr); + if(log.isInfoEnabled()) + log.info(tupleExpr); while(result.hasNext()) { - Statement s = result.next(); - log.info(s); + final Statement s = result.next(); + if(log.isInfoEnabled()) + log.info(s); } } { - String query = + final String query = "construct { " + " ?x ?p1 ?o . " + " ?s ?p2 ?x . 
" + @@ -220,20 +223,26 @@ */ final BigdataSailGraphQuery graphQuery = (BigdataSailGraphQuery) cxn.prepareGraphQuery(QueryLanguage.SPARQL, query); - GraphQueryResult result = graphQuery.evaluate(); + final GraphQueryResult result = graphQuery.evaluate(); final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - log.info(tupleExpr); + if(log.isInfoEnabled()) + log.info(tupleExpr); while(result.hasNext()) { - Statement s = result.next(); - log.info(s); + final Statement s = result.next(); + if(log.isInfoEnabled()) + log.info(s); } } } finally { + cxn.close(); + + } + } finally { sail.__tearDownUnitTest(); } @@ -242,6 +251,7 @@ public void testMultiDescribe() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = @@ -250,13 +260,13 @@ try { - URI mike = new URIImpl("_:Mike"); - URI person = new URIImpl("_:Person"); - URI likes = new URIImpl("_:likes"); - URI rdf = new URIImpl("_:RDF"); - URI thing = new URIImpl("_:Thing"); - Literal l1 = new LiteralImpl("Mike"); - Literal l2 = new LiteralImpl("RDF"); + final URI mike = new URIImpl("_:Mike"); + final URI person = new URIImpl("_:Person"); + final URI likes = new URIImpl("_:likes"); + final URI rdf = new URIImpl("_:RDF"); + final URI thing = new URIImpl("_:Thing"); + final Literal l1 = new LiteralImpl("Mike"); + final Literal l2 = new LiteralImpl("RDF"); /**/ cxn.add(mike, RDF.TYPE, person); cxn.add(mike, RDFS.LABEL, l1); @@ -279,7 +289,7 @@ { - String query = + final String query = "describe ?x ?y " + "WHERE { " + " ?x <"+likes+"> ?y . " + @@ -311,20 +321,23 @@ */ final BigdataSailGraphQuery graphQuery = (BigdataSailGraphQuery) cxn.prepareGraphQuery(QueryLanguage.SPARQL, query); - GraphQueryResult result = graphQuery.evaluate(); + final GraphQueryResult result = graphQuery.evaluate(); final TupleExpr tupleExpr = graphQuery.getTupleExpr(); - log.info(tupleExpr); + if(log.isInfoEnabled()) + log.info(tupleExpr); while(result.hasNext()) { - Statement s = result.next(); - log.info(s); + final Statement s = result.next(); + if(log.isInfoEnabled()) + log.info(s); } } - } finally { cxn.close(); + } + } finally { sail.__tearDownUnitTest(); } Deleted: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java 2011-06-18 21:21:54 UTC (rev 4738) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java 2011-06-20 12:42:13 UTC (rev 4739) @@ -1,136 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. 
- -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -/* - * Created on Sep 16, 2009 - */ - -package com.bigdata.rdf.sail; - -import java.util.Collection; -import java.util.LinkedList; - -import org.openrdf.model.URI; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.query.BindingSet; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.TupleQuery; -import org.openrdf.query.TupleQueryResult; -import org.openrdf.query.impl.BindingImpl; - -import com.bigdata.bop.PipelineOp; - -/** - * Unit tests the query hints aspect of the {@link BigdataSail} implementation. - * - * @author <a href="mailto:mrp...@us...">Mike Personick</a> - * @version $Id$ - */ -public class TestQueryHints extends QuadsTestCase { - - /** - * - */ - public TestQueryHints() { - } - - /** - * @param arg0 - */ - public TestQueryHints(String arg0) { - super(arg0); - } - - /** - * Tests adding query hints in SPARQL. - * - * @throws Exception - * - * @todo Unfortunately, this does not really _test_ anything since the query - * should be answered correctly regardless of the query hint(s) - * specified. - */ - public void testQueryHints() throws Exception { - - final BigdataSail sail = getSail(); - sail.initialize(); - final BigdataSailRepository repo = new BigdataSailRepository(sail); - final BigdataSailRepositoryConnection cxn = - (BigdataSailRepositoryConnection) repo.getConnection(); - cxn.setAutoCommit(false); - - try { - - URI a = new URIImpl("_:A"); - URI b = new URIImpl("_:B"); - URI c = new URIImpl("_:C"); -/**/ - cxn.add(a, b, c); -/**/ - - /* - * Note: The either flush() or commit() is required to flush the - * statement buffers to the database before executing any operations - * that go around the sail. - */ - cxn.flush();//commit(); - -/**/ - if (log.isInfoEnabled()) { - log.info("\n" + sail.getDatabase().dumpStore()); - } - - { - - final String query = "PREFIX " + QueryHints.PREFIX - + ": " + "<http://www.bigdata.com/queryOption#" + // - PipelineOp.Annotations.MAX_PARALLEL + "=-5" // - + "&" + "com.bigdata.fullScanTreshold=1000" // - + ">\n"// - + "SELECT * " + // - "WHERE { " + // - " <" + a + "> ?p ?o " + // - "}"; - - final TupleQuery tupleQuery = - cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - tupleQuery.setIncludeInferred(true /* includeInferred */); - final TupleQueryResult result = tupleQuery.evaluate(); - - final Collection<BindingSet> answer = new LinkedList<BindingSet>(); - answer.add(createBindingSet( - new BindingImpl("p", b), - new BindingImpl("o", c) - )); - - compare(result, answer); - - } - - } finally { - cxn.close(); - sail.__tearDownUnitTest(); - } - - } - -} Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHintsUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHintsUtility.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHintsUtility.java 2011-06-20 12:42:13 UTC (rev 4739) @@ -0,0 +1,155 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... 
+ +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Properties; + +import junit.framework.TestCase2; + +import org.openrdf.model.URI; +import org.openrdf.model.impl.URIImpl; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.QueryLanguage; + +import com.bigdata.bop.PipelineOp; +import com.bigdata.rdf.store.BD; + +/** + * Unit test for {@link QueryHintsUtility}. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class TestQueryHintsUtility extends TestCase2 { + + /** + * + */ + public TestQueryHintsUtility() { + } + + /** + * @param name + */ + public TestQueryHintsUtility(String name) { + super(name); + } + + public void test_selectQuery() throws MalformedQueryException { + + final Map<String,String> expected = new LinkedHashMap<String, String>(); + { + + expected.put(PipelineOp.Annotations.MAX_PARALLEL,"-5"); + + expected.put("com.bigdata.fullScanTreshold","1000"); + + } + + final QueryLanguage ql = QueryLanguage.SPARQL; + + final String baseURI = "http://www.bigdata.com/sparql"; + + final URI a = new URIImpl("_:A"); + + final String qs = // + "PREFIX " + QueryHints.PREFIX + ": " + // + "<http://www.bigdata.com/queryOption" + // + "#" + PipelineOp.Annotations.MAX_PARALLEL + "=-5" + // + "&" + "com.bigdata.fullScanTreshold=1000" // + + ">\n"// + + "SELECT * " + "WHERE { " + " <" + a + "> ?p ?o " + "}"; + + final Properties actual = QueryHintsUtility.parseQueryHints(ql, qs, + baseURI); + + assertSameProperties(expected, actual); + + } + + public void test_describeQuery() throws MalformedQueryException { + + final Map<String,String> expected = new LinkedHashMap<String, String>(); + { + + expected.put(PipelineOp.Annotations.MAX_PARALLEL,"-5"); + + expected.put("com.bigdata.fullScanTreshold","1000"); + + } + + final QueryLanguage ql = QueryLanguage.SPARQL; + + final String baseURI = "http://www.bigdata.com/sparql"; + + final String qs = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "PREFIX " + QueryHints.PREFIX + ": " + // + "<http://www.bigdata.com/queryOption" + // + "#" + PipelineOp.Annotations.MAX_PARALLEL + "=-5" + // + "&" + "com.bigdata.fullScanTreshold=1000" // + + ">\n"+// + "describe ?x " +// + "WHERE { " +// + " ?x rdf:type bd:Person . 
" +// + " ?x bd:likes bd:RDF " +// + "}"; + + final Properties actual = QueryHintsUtility.parseQueryHints(ql, qs, + baseURI); + + assertSameProperties(expected, actual); + + } + + private static void assertSameProperties( + final Map<String, String> expected, final Properties actual) { + + assertEquals("size", expected.size(), actual.size()); + + for (Map.Entry<String, String> e : expected.entrySet()) { + + final String name = e.getKey(); + + final String expectedValue = e.getValue(); + + final String actualValue = actual.getProperty(name); + + assertEquals(name, expectedValue, actualValue); + + } + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHintsUtility.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2011-06-20 14:06:22
Revision: 4740 http://bigdata.svn.sourceforge.net/bigdata/?rev=4740&view=rev Author: thompsonbry Date: 2011-06-20 14:06:10 +0000 (Mon, 20 Jun 2011) Log Message: ----------- Moved the QueryType class into com.bigdata.rdf.sail. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java Removed Paths: ------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryType.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSail.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -2913,6 +2913,14 @@ } + public CloseableIteration<? extends Statement, SailException> getStatements( + final Resource s, final URI p, final Value o, + final Resource context) + throws SailException { + return getStatements(s,p,o,true/*includeInferred*/,context==null? + new Resource[]{}:new Resource[]{context}); + } + /** * Note: if the context is <code>null</code>, then you will see data * from each context in a quad store, including anything in the Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailGraphQuery.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -49,7 +49,7 @@ public class BigdataSailGraphQuery extends SailGraphQuery implements BigdataSailQuery { - protected static Logger log = Logger.getLogger(BigdataSailGraphQuery.class); + private static Logger log = Logger.getLogger(BigdataSailGraphQuery.class); /** * Query hints are embedded in query strings as namespaces. 
Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -19,8 +19,6 @@ import com.bigdata.rdf.changesets.IChangeLog; import com.bigdata.rdf.changesets.IChangeRecord; import com.bigdata.rdf.sail.BigdataSail.BigdataSailConnection; -import com.bigdata.rdf.sail.bench.NanoSparqlClient; -import com.bigdata.rdf.sail.bench.NanoSparqlClient.QueryType; import com.bigdata.rdf.store.AbstractTripleStore; /** @@ -81,7 +79,7 @@ baseURI); final boolean describe = ql == QueryLanguage.SPARQL - && NanoSparqlClient.QueryType.fromQuery(qs) == QueryType.DESCRIBE; + && QueryType.fromQuery(qs) == QueryType.DESCRIBE; return new BigdataSailGraphQuery(parsedQuery, this, queryHints, describe); @@ -157,7 +155,7 @@ } else if (parsedQuery instanceof ParsedGraphQuery) { final boolean describe = ql == QueryLanguage.SPARQL - && NanoSparqlClient.QueryType.fromQuery(qs) == QueryType.DESCRIBE; + && QueryType.fromQuery(qs) == QueryType.DESCRIBE; return new BigdataSailGraphQuery((ParsedGraphQuery) parsedQuery, this, queryHints, describe); Copied: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java (from rev 4738, branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryType.java) =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -0,0 +1,137 @@ +package com.bigdata.rdf.sail; + +import java.util.Arrays; + +/** + * Helper class to figure out the type of a query. + */ +public enum QueryType { + + ASK(0), DESCRIBE(1), CONSTRUCT(2), SELECT(3); + + private final int order; + + private QueryType(final int order) { + + this.order = order; + + } + + private static QueryType getQueryType(final int order) { + switch (order) { + case 0: + return ASK; + case 1: + return DESCRIBE; + case 2: + return CONSTRUCT; + case 3: + return SELECT; + default: + throw new IllegalArgumentException("order=" + order); + } + } + + /** + * Used to note the offset at which a keyword was found. + */ + static private class P implements Comparable<QueryType.P> { + + final int offset; + + final QueryType queryType; + + public P(final int offset, final QueryType queryType) { + this.offset = offset; + this.queryType = queryType; + } + + /** Sort into ascending offset. */ + public int compareTo(final QueryType.P o) { + + return offset - o.offset; + + } + + public int hashCode() { + + return offset; + + } + + public boolean equals(final Object o) { + + if (this == o) + return true; + + if (o instanceof P) { + + final P t = (P) o; + + return this.offset == t.offset && this.queryType == t.queryType; + + } + + return false; + + } + + public String toString() { + + return "{offset=" + offset + ",type=" + queryType + "}"; + + } + + } + + /** + * Hack returns the query type based on the first occurrence of the + * keyword for any known query type in the query. + * + * @param queryStr + * The query. + * + * @return The query type. 
+ */ + static public QueryType fromQuery(final String queryStr) { + + // force all to lower case. + final String s = queryStr.toUpperCase(); + + final int ntypes = QueryType.values().length; + + final QueryType.P[] p = new QueryType.P[ntypes]; + + int nmatch = 0; + for (int i = 0; i < ntypes; i++) { + + final QueryType queryType = getQueryType(i); + + final int offset = s.indexOf(queryType.toString()); + + if (offset == -1) + continue; + + p[nmatch++] = new P(offset, queryType); + + } + + if (nmatch == 0) { + + throw new RuntimeException( + "Could not determine the query type: " + queryStr); + + } + + Arrays.sort(p, 0/* fromIndex */, nmatch/* toIndex */); + + final QueryType.P tmp = p[0]; + + // System.out.println("QueryType: offset=" + tmp.offset + ", type=" + // + tmp.queryType); + + return tmp.queryType; + + } + +} \ No newline at end of file Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -65,6 +65,7 @@ import com.bigdata.counters.CAT; import com.bigdata.jsr166.LinkedBlockingQueue; +import com.bigdata.rdf.sail.QueryType; /** * A flyweight utility for issuing queries to an http SPARQL endpoint. @@ -89,106 +90,106 @@ */ static private final int DEFAULT_TIMEOUT = 60*1000; - /** - * Helper class to figure out the type of a query. - */ - public static enum QueryType { - - ASK(0), - DESCRIBE(1), - CONSTRUCT(2), - SELECT(3); - - private final int order; - - private QueryType(final int order) { - this.order = order; - } - - private static QueryType getQueryType(final int order) { - switch (order) { - case 0: - return ASK; - case 1: - return DESCRIBE; - case 2: - return CONSTRUCT; - case 3: - return SELECT; - default: - throw new IllegalArgumentException("order=" + order); - } - } - - /** - * Used to note the offset at which a keyword was found. - */ - static private class P implements Comparable<P> { - - final int offset; - final QueryType queryType; - - public P(final int offset, final QueryType queryType) { - this.offset = offset; - this.queryType = queryType; - } - /** Sort into descending offset. */ - public int compareTo(final P o) { - return o.offset - offset; - } - } - - /** - * Hack returns the query type based on the first occurrence of the - * keyword for any known query type in the query. - * - * @param queryStr - * The query. - * - * @return The query type. - */ - static public QueryType fromQuery(final String queryStr) { - - // force all to lower case. 
- final String s = queryStr.toUpperCase(); - - final int ntypes = QueryType.values().length; - - final P[] p = new P[ntypes]; - - int nmatch = 0; - for (int i = 0; i < ntypes; i++) { - - final QueryType queryType = getQueryType(i); - - final int offset = s.indexOf(queryType.toString()); - - if (offset == -1) - continue; - - p[nmatch++] = new P(offset, queryType); - - } - - if (nmatch == 0) { - - throw new RuntimeException( - "Could not determine the query type: " + queryStr); - - } - - Arrays.sort(p, 0/* fromIndex */, nmatch/* toIndex */); - - final P tmp = p[0]; - -// System.out.println("QueryType: offset=" + tmp.offset + ", type=" -// + tmp.queryType); - - return tmp.queryType; - - } - - } +// /** +// * Helper class to figure out the type of a query. +// */ +// private static enum QueryType { +// +// ASK(0), +// DESCRIBE(1), +// CONSTRUCT(2), +// SELECT(3); +// +// private final int order; +// +// private QueryType(final int order) { +// this.order = order; +// } +// +// private static QueryType getQueryType(final int order) { +// switch (order) { +// case 0: +// return ASK; +// case 1: +// return DESCRIBE; +// case 2: +// return CONSTRUCT; +// case 3: +// return SELECT; +// default: +// throw new IllegalArgumentException("order=" + order); +// } +// } +// +// /** +// * Used to note the offset at which a keyword was found. +// */ +// static private class P implements Comparable<P> { +// +// final int offset; +// final QueryType queryType; +// +// public P(final int offset, final QueryType queryType) { +// this.offset = offset; +// this.queryType = queryType; +// } +// /** Sort into descending offset. */ +// public int compareTo(final P o) { +// return o.offset - offset; +// } +// } +// +// /** +// * Hack returns the query type based on the first occurrence of the +// * keyword for any known query type in the query. +// * +// * @param queryStr +// * The query. +// * +// * @return The query type. +// */ +// static public QueryType fromQuery(final String queryStr) { +// +// // force all to lower case. +// final String s = queryStr.toUpperCase(); +// +// final int ntypes = QueryType.values().length; +// +// final P[] p = new P[ntypes]; +// +// int nmatch = 0; +// for (int i = 0; i < ntypes; i++) { +// +// final QueryType queryType = getQueryType(i); +// +// final int offset = s.indexOf(queryType.toString()); +// +// if (offset == -1) +// continue; +// +// p[nmatch++] = new P(offset, queryType); +// +// } +// +// if (nmatch == 0) { +// +// throw new RuntimeException( +// "Could not determine the query type: " + queryStr); +// +// } +// +// Arrays.sort(p, 0/* fromIndex */, nmatch/* toIndex */); +// +// final P tmp = p[0]; +// +//// System.out.println("QueryType: offset=" + tmp.offset + ", type=" +//// + tmp.queryType); +// +// return tmp.queryType; +// +// } +// +// } /** * Class runs a SPARQL query against an HTTP endpoint. 
Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -62,6 +62,7 @@ import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; import com.bigdata.rdf.sail.BigdataSailTupleQuery; import com.bigdata.rdf.sail.QueryHints; +import com.bigdata.rdf.sail.QueryType; import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.relation.AbstractResource; import com.bigdata.relation.RelationSchema; Deleted: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryType.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryType.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryType.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -1,103 +0,0 @@ -package com.bigdata.rdf.sail.webapp; - -import java.util.Arrays; - -/** - * Helper class to figure out the type of a query. - */ -public enum QueryType { - - ASK(0), DESCRIBE(1), CONSTRUCT(2), SELECT(3); - - private final int order; - - private QueryType(final int order) { - this.order = order; - } - - private static QueryType getQueryType(final int order) { - switch (order) { - case 0: - return ASK; - case 1: - return DESCRIBE; - case 2: - return CONSTRUCT; - case 3: - return SELECT; - default: - throw new IllegalArgumentException("order=" + order); - } - } - - /** - * Used to note the offset at which a keyword was found. - */ - static private class P implements Comparable<QueryType.P> { - - final int offset; - - final QueryType queryType; - - public P(final int offset, final QueryType queryType) { - this.offset = offset; - this.queryType = queryType; - } - - /** Sort into descending offset. */ - public int compareTo(final QueryType.P o) { - return o.offset - offset; - } - } - - /** - * Hack returns the query type based on the first occurrence of the - * keyword for any known query type in the query. - * - * @param queryStr - * The query. - * - * @return The query type. - */ - static public QueryType fromQuery(final String queryStr) { - - // force all to lower case. 
- final String s = queryStr.toUpperCase(); - - final int ntypes = QueryType.values().length; - - final QueryType.P[] p = new QueryType.P[ntypes]; - - int nmatch = 0; - for (int i = 0; i < ntypes; i++) { - - final QueryType queryType = getQueryType(i); - - final int offset = s.indexOf(queryType.toString()); - - if (offset == -1) - continue; - - p[nmatch++] = new P(offset, queryType); - - } - - if (nmatch == 0) { - - throw new RuntimeException( - "Could not determine the query type: " + queryStr); - - } - - Arrays.sort(p, 0/* fromIndex */, nmatch/* toIndex */); - - final QueryType.P tmp = p[0]; - - // System.out.println("QueryType: offset=" + tmp.offset + ", type=" - // + tmp.queryType); - - return tmp.queryType; - - } - -} \ No newline at end of file Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -81,8 +81,11 @@ final TestSuite suite = new TestSuite("Sesame 2.x integration"); - // unit tests for extracting query hints from a SPARQL query. + // test suite for extracting query hints from a SPARQL query. suite.addTestSuite(TestQueryHintsUtility.class); + + // test suite for utility to extract the type of a SPARQL query. + suite.addTestSuite(TestQueryType.class); // bootstrap tests for the BigdataSail suite.addTestSuite(TestBootstrapBigdataSail.class); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java 2011-06-20 12:42:13 UTC (rev 4739) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBOps.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -46,7 +46,6 @@ import org.openrdf.query.impl.BindingImpl; import com.bigdata.rdf.axioms.NoAxioms; -import com.bigdata.rdf.lexicon.LexiconRelation; import com.bigdata.rdf.store.BD; import com.bigdata.rdf.vocab.NoVocabulary; @@ -56,12 +55,12 @@ */ public class TestBOps extends ProxyBigdataSailTestCase { - protected static final Logger log = Logger.getLogger(TestBOps.class); + private static final Logger log = Logger.getLogger(TestBOps.class); @Override public Properties getProperties() { - Properties props = super.getProperties(); + final Properties props = super.getProperties(); props.setProperty(BigdataSail.Options.TRUTH_MAINTENANCE, "false"); props.setProperty(BigdataSail.Options.AXIOMS_CLASS, NoAxioms.class.getName()); @@ -89,6 +88,7 @@ public void testSimpleJoin() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = @@ -97,17 +97,17 @@ try { - final ValueFactory vf = sail.getValueFactory(); +// final ValueFactory vf = sail.getValueFactory(); final String ns = BD.NAMESPACE; - URI mike = new URIImpl(ns+"Mike"); - URI bryan = new URIImpl(ns+"Bryan"); - URI person = new URIImpl(ns+"Person"); - URI likes = new URIImpl(ns+"likes"); - URI rdf = new URIImpl(ns+"RDF"); - Literal l1 = new LiteralImpl("Mike"); - Literal l2 = new LiteralImpl("Bryan"); + final URI mike = new URIImpl(ns+"Mike"); + final URI bryan = new URIImpl(ns+"Bryan"); + final 
URI person = new URIImpl(ns+"Person"); + final URI likes = new URIImpl(ns+"likes"); + final URI rdf = new URIImpl(ns+"RDF"); + final Literal l1 = new LiteralImpl("Mike"); + final Literal l2 = new LiteralImpl("Bryan"); /**/ cxn.setNamespace("ns", ns); @@ -132,7 +132,7 @@ { - String query = + final String query = "PREFIX rdf: <"+RDF.NAMESPACE+"> " + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + "PREFIX ns: <"+ns+"> " + @@ -146,13 +146,13 @@ final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); + final TupleQueryResult result = tupleQuery.evaluate(); // while (result.hasNext()) { // System.err.println(result.next()); // } - Collection<BindingSet> solution = new LinkedList<BindingSet>(); + final Collection<BindingSet> solution = new LinkedList<BindingSet>(); solution.add(createBindingSet(new Binding[] { new BindingImpl("s", mike), new BindingImpl("likes", rdf), @@ -167,9 +167,10 @@ compare(result, solution); } - + } finally { + cxn.close(); + } } finally { - cxn.close(); sail.__tearDownUnitTest(); } @@ -178,6 +179,7 @@ public void testSimpleConstraint() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = @@ -190,17 +192,17 @@ final String ns = BD.NAMESPACE; - URI jill = new URIImpl(ns+"Jill"); - URI jane = new URIImpl(ns+"Jane"); - URI person = new URIImpl(ns+"Person"); - URI age = new URIImpl(ns+"age"); - URI IQ = new URIImpl(ns+"IQ"); - Literal l1 = new LiteralImpl("Jill"); - Literal l2 = new LiteralImpl("Jane"); - Literal age1 = vf.createLiteral(20); - Literal age2 = vf.createLiteral(30); - Literal IQ1 = vf.createLiteral(130); - Literal IQ2 = vf.createLiteral(140); + final URI jill = new URIImpl(ns+"Jill"); + final URI jane = new URIImpl(ns+"Jane"); + final URI person = new URIImpl(ns+"Person"); + final URI age = new URIImpl(ns+"age"); + final URI IQ = new URIImpl(ns+"IQ"); + final Literal l1 = new LiteralImpl("Jill"); + final Literal l2 = new LiteralImpl("Jane"); + final Literal age1 = vf.createLiteral(20); + final Literal age2 = vf.createLiteral(30); + final Literal IQ1 = vf.createLiteral(130); + final Literal IQ2 = vf.createLiteral(140); /**/ cxn.setNamespace("ns", ns); @@ -227,7 +229,7 @@ { - String query = + final String query = "PREFIX rdf: <"+RDF.NAMESPACE+"> " + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + "PREFIX ns: <"+ns+"> " + @@ -243,13 +245,13 @@ final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); + final TupleQueryResult result = tupleQuery.evaluate(); // while (result.hasNext()) { // System.err.println(result.next()); // } - Collection<BindingSet> solution = new LinkedList<BindingSet>(); + final Collection<BindingSet> solution = new LinkedList<BindingSet>(); solution.add(createBindingSet(new Binding[] { new BindingImpl("s", jill), new BindingImpl("age", age1), @@ -260,9 +262,10 @@ compare(result, solution); } - } finally { cxn.close(); + } + } finally { sail.__tearDownUnitTest(); } @@ -271,6 +274,7 @@ public void testSimpleOptional() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = @@ -279,17 +283,17 @@ try { - final ValueFactory vf = sail.getValueFactory(); +// final ValueFactory vf = sail.getValueFactory(); final String ns = BD.NAMESPACE; - URI 
mike = new URIImpl(ns+"Mike"); - URI bryan = new URIImpl(ns+"Bryan"); - URI person = new URIImpl(ns+"Person"); - URI likes = new URIImpl(ns+"likes"); - URI rdf = new URIImpl(ns+"RDF"); - Literal l1 = new LiteralImpl("Mike"); - Literal l2 = new LiteralImpl("Bryan"); + final URI mike = new URIImpl(ns+"Mike"); + final URI bryan = new URIImpl(ns+"Bryan"); + final URI person = new URIImpl(ns+"Person"); + final URI likes = new URIImpl(ns+"likes"); + final URI rdf = new URIImpl(ns+"RDF"); + final Literal l1 = new LiteralImpl("Mike"); +// final Literal l2 = new LiteralImpl("Bryan"); /**/ cxn.setNamespace("ns", ns); @@ -314,7 +318,7 @@ { - String query = + final String query = "PREFIX rdf: <"+RDF.NAMESPACE+"> " + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + "PREFIX ns: <"+ns+"> " + @@ -328,13 +332,13 @@ final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); + final TupleQueryResult result = tupleQuery.evaluate(); // while (result.hasNext()) { // System.err.println(result.next()); // } - Collection<BindingSet> solution = new LinkedList<BindingSet>(); + final Collection<BindingSet> solution = new LinkedList<BindingSet>(); solution.add(createBindingSet(new Binding[] { new BindingImpl("s", mike), new BindingImpl("likes", rdf), @@ -349,9 +353,10 @@ compare(result, solution); } - } finally { cxn.close(); + } + } finally { sail.__tearDownUnitTest(); } @@ -360,6 +365,7 @@ public void testOrEquals() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = @@ -368,20 +374,20 @@ try { - final ValueFactory vf = sail.getValueFactory(); - - final LexiconRelation lex = sail.getDatabase().getLexiconRelation(); +// final ValueFactory vf = sail.getValueFactory(); +// +// final LexiconRelation lex = sail.getDatabase().getLexiconRelation(); final String ns = BD.NAMESPACE; - URI mike = new URIImpl(ns+"Mike"); - URI bryan = new URIImpl(ns+"Bryan"); - URI martyn = new URIImpl(ns+"Martyn"); - URI person = new URIImpl(ns+"Person"); - URI p = new URIImpl(ns+"p"); - Literal l1 = new LiteralImpl("Mike"); - Literal l2 = new LiteralImpl("Bryan"); - Literal l3 = new LiteralImpl("Martyn"); + final URI mike = new URIImpl(ns+"Mike"); + final URI bryan = new URIImpl(ns+"Bryan"); + final URI martyn = new URIImpl(ns+"Martyn"); + final URI person = new URIImpl(ns+"Person"); + final URI p = new URIImpl(ns+"p"); + final Literal l1 = new LiteralImpl("Mike"); + final Literal l2 = new LiteralImpl("Bryan"); + final Literal l3 = new LiteralImpl("Martyn"); /**/ cxn.setNamespace("ns", ns); @@ -420,13 +426,13 @@ final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); + final TupleQueryResult result = tupleQuery.evaluate(); // while (result.hasNext()) { // System.err.println(result.next()); // } - Collection<BindingSet> solution = new LinkedList<BindingSet>(); + final Collection<BindingSet> solution = new LinkedList<BindingSet>(); solution.add(createBindingSet(new Binding[] { new BindingImpl("s", mike), new BindingImpl("p", RDFS.LABEL), @@ -441,9 +447,10 @@ compare(result, solution); } - } finally { cxn.close(); + } + } finally { sail.__tearDownUnitTest(); } @@ -452,6 +459,7 @@ public void testHashJoin() throws Exception { final BigdataSail sail = getSail(); + try { sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final 
BigdataSailRepositoryConnection cxn = @@ -460,21 +468,21 @@ try { - final ValueFactory vf = sail.getValueFactory(); - - final LexiconRelation lex = sail.getDatabase().getLexiconRelation(); +// final ValueFactory vf = sail.getValueFactory(); +// +// final LexiconRelation lex = sail.getDatabase().getLexiconRelation(); final String ns = BD.NAMESPACE; - URI mikeA = new URIImpl(ns+"MikeA"); - URI mikeB = new URIImpl(ns+"MikeB"); - URI bryan = new URIImpl(ns+"Bryan"); - URI martyn = new URIImpl(ns+"Martyn"); - URI person = new URIImpl(ns+"Person"); - URI name = new URIImpl(ns+"name"); - Literal l1 = new LiteralImpl("Mike"); - Literal l2 = new LiteralImpl("Bryan"); - Literal l3 = new LiteralImpl("Martyn"); + final URI mikeA = new URIImpl(ns+"MikeA"); + final URI mikeB = new URIImpl(ns+"MikeB"); + final URI bryan = new URIImpl(ns+"Bryan"); + final URI martyn = new URIImpl(ns+"Martyn"); + final URI person = new URIImpl(ns+"Person"); + final URI name = new URIImpl(ns+"name"); + final Literal l1 = new LiteralImpl("Mike"); + final Literal l2 = new LiteralImpl("Bryan"); + final Literal l3 = new LiteralImpl("Martyn"); /**/ cxn.setNamespace("ns", ns); @@ -525,7 +533,7 @@ //// " filter(!bound(?s2) || ?s1 != ?s2) . " + // "}"; - String query = + final String query = "PREFIX "+QueryHints.PREFIX+": <"+QueryHints.NAMESPACE+QueryHints.HASH_JOIN+"=true> " + "PREFIX rdf: <"+RDF.NAMESPACE+"> " + "PREFIX rdfs: <"+RDFS.NAMESPACE+"> " + @@ -546,11 +554,12 @@ final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); - TupleQueryResult result = tupleQuery.evaluate(); + final TupleQueryResult result = tupleQuery.evaluate(); while (result.hasNext()) { final BindingSet tmp = result.next(); - if(log.isInfoEnabled())log.info(tmp.toString()); + if (log.isInfoEnabled()) + log.info(tmp.toString()); } // Collection<BindingSet> solution = new LinkedList<BindingSet>(); @@ -568,9 +577,10 @@ // compare(result, solution); } - } finally { cxn.close(); + } + } finally { sail.__tearDownUnitTest(); } Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java 2011-06-20 14:06:10 UTC (rev 4740) @@ -0,0 +1,116 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import junit.framework.TestCase2; + +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; + +import com.bigdata.rdf.store.BD; + +/** + * Test suite for {@link QueryType}. 
+ * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class TestQueryType extends TestCase2 { + + /** + * + */ + public TestQueryType() { + } + + /** + * @param name + */ + public TestQueryType(String name) { + super(name); + } + + public void test_select() { + + final String s = "select ?p ?o where {<http://bigdata.com/foo> ?p ?o}"; + + assertEquals(QueryType.SELECT, QueryType.fromQuery(s)); + + } + + public void test_select_with_ask_in_URI() { + + final String s = "select ?p ?o where {<http://blablabla.com/ask_something> ?p ?o}"; + + assertEquals(QueryType.SELECT, QueryType.fromQuery(s)); + + } + + public void test_describe() { + + final String s = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "describe ?x " +// + "WHERE { " +// + " ?x rdf:type bd:Person . " +// + " ?x bd:likes bd:RDF " +// + "}"; + + assertEquals(QueryType.DESCRIBE, QueryType.fromQuery(s)); + + } + + public void test_construct() { + + /* + * Sample query from the SPARQL 1.0 Recommendation. + */ + final String s = "PREFIX foaf: <http://xmlns.com/foaf/0.1/>" + + "PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#>" + + "CONSTRUCT { <http://example.org/person#Alice> vcard:FN ?name }" + + "WHERE { ?x foaf:name ?name }"; + + assertEquals(QueryType.CONSTRUCT, QueryType.fromQuery(s)); + + } + + public void test_ask() { + + /* + * Sample query from the SPARQL 1.0 Recommendation. + */ + final String s = "PREFIX foaf: <http://xmlns.com/foaf/0.1/>" + + "ASK { ?x foaf:name \"Alice\" }"; + + assertEquals(QueryType.ASK, QueryType.fromQuery(s)); + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL
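To make the keyword-offset heuristic concrete, here is a small hand-run sketch against this revision of QueryType. The demo class is invented for this example; the expected values in the comments assume the first-keyword-wins behavior shown above.

import com.bigdata.rdf.sail.QueryType;

public class QueryTypeDemo {

    public static void main(final String[] args) {

        // The query-form keyword at the smallest offset in the upper-cased
        // query string wins.
        System.out.println(QueryType.fromQuery(
                "ASK { ?x <http://xmlns.com/foaf/0.1/name> \"Alice\" }")); // ASK

        // A keyword buried in a URI after the real query form sorts later,
        // so this query is still classified correctly.
        System.out.println(QueryType.fromQuery(
                "select ?p ?o where {<http://blablabla.com/ask_something> ?p ?o}")); // SELECT

        // But a keyword inside a PREFIX declaration precedes the query form,
        // so this SELECT query would be misreported as ASK.
        System.out.println(QueryType.fromQuery(
                "prefix foo: <http://example.org/ask/ns> select * where {?s ?p ?o}")); // ASK (wrong)

    }

}

The third case is the failure mode fixed by the next revision below, which classifies the query by parsing it rather than by scanning for keywords.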
From: <tho...@us...> - 2011-06-20 14:42:01
Revision: 4742 http://bigdata.svn.sourceforge.net/bigdata/?rev=4742&view=rev Author: thompsonbry Date: 2011-06-20 14:41:55 +0000 (Mon, 20 Jun 2011) Log Message: ----------- Rewrote the QueryType class to parse the query and examine the type of the ASTQuery node. This fixes the bug, but we are now parsing the query one more time, which introduces additional per-query overhead that still needs to be resolved. See https://sourceforge.net/apps/trac/bigdata/ticket/336 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java 2011-06-20 14:14:54 UTC (rev 4741) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java 2011-06-20 14:41:55 UTC (rev 4742) @@ -1,90 +1,27 @@ package com.bigdata.rdf.sail; -import java.util.Arrays; +import org.openrdf.query.parser.sparql.ast.ASTAskQuery; +import org.openrdf.query.parser.sparql.ast.ASTConstructQuery; +import org.openrdf.query.parser.sparql.ast.ASTDescribeQuery; +import org.openrdf.query.parser.sparql.ast.ASTQuery; +import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; +import org.openrdf.query.parser.sparql.ast.ASTSelectQuery; +import org.openrdf.query.parser.sparql.ast.ParseException; +import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder; +import org.openrdf.query.parser.sparql.ast.TokenMgrError; /** * Helper class to figure out the type of a query. */ public enum QueryType { - ASK(0), DESCRIBE(1), CONSTRUCT(2), SELECT(3); + ASK, DESCRIBE, CONSTRUCT, SELECT; - private final int order; - - private QueryType(final int order) { + private QueryType() { - this.order = order; - } - private static QueryType getQueryType(final int order) { - switch (order) { - case 0: - return ASK; - case 1: - return DESCRIBE; - case 2: - return CONSTRUCT; - case 3: - return SELECT; - default: - throw new IllegalArgumentException("order=" + order); - } - } - /** - * Used to note the offset at which a keyword was found. - */ - static private class P implements Comparable<QueryType.P> { - - final int offset; - - final QueryType queryType; - - public P(final int offset, final QueryType queryType) { - this.offset = offset; - this.queryType = queryType; - } - - /** Sort into ascending offset. */ - public int compareTo(final QueryType.P o) { - - return offset - o.offset; - - } - - public int hashCode() { - - return offset; - - } - - public boolean equals(final Object o) { - - if (this == o) - return true; - - if (o instanceof P) { - - final P t = (P) o; - - return this.offset == t.offset && this.queryType == t.queryType; - - } - - return false; - - } - - public String toString() { - - return "{offset=" + offset + ",type=" + queryType + "}"; - - } - - } - - /** * Hack returns the query type based on the first occurrence of the * keyword for any known query type in the query. * @@ -94,44 +31,22 @@ * @return The query type. */ static public QueryType fromQuery(final String queryStr) { - - // force all to lower case. 
- final String s = queryStr.toUpperCase(); - - final int ntypes = QueryType.values().length; - - final QueryType.P[] p = new QueryType.P[ntypes]; - - int nmatch = 0; - for (int i = 0; i < ntypes; i++) { - - final QueryType queryType = getQueryType(i); - - final int offset = s.indexOf(queryType.toString()); - - if (offset == -1) - continue; - - p[nmatch++] = new P(offset, queryType); - + + try { + final ASTQueryContainer queryContainer = SyntaxTreeBuilder + .parseQuery(queryStr); + final ASTQuery query = queryContainer.getQuery(); + if(query instanceof ASTSelectQuery) return QueryType.SELECT; + if(query instanceof ASTDescribeQuery) return QueryType.DESCRIBE; + if(query instanceof ASTConstructQuery) return QueryType.CONSTRUCT; + if(query instanceof ASTAskQuery) return QueryType.ASK; + throw new RuntimeException(queryContainer.toString()); + } catch (TokenMgrError ex) { + throw new RuntimeException(ex); + } catch (ParseException ex) { + throw new RuntimeException(ex); } - if (nmatch == 0) { - - throw new RuntimeException( - "Could not determine the query type: " + queryStr); - - } - - Arrays.sort(p, 0/* fromIndex */, nmatch/* toIndex */); - - final QueryType.P tmp = p[0]; - - // System.out.println("QueryType: offset=" + tmp.offset + ", type=" - // + tmp.queryType); - - return tmp.queryType; - } - -} \ No newline at end of file + +} Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java 2011-06-20 14:14:54 UTC (rev 4741) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java 2011-06-20 14:41:55 UTC (rev 4742) @@ -71,6 +71,17 @@ } + public void test_select_with_ask_in_PREFIX() { + + final String s = + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix foo: <http://www.bigdata.com/test/ask/ns> " + + "select ?p ?o where {<http://blablabla.com/ask_something> ?p ?o}"; + + assertEquals(QueryType.SELECT, QueryType.fromQuery(s)); + + } + public void test_describe() { final String s =
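The parse-based classification can be exercised directly. Here is a minimal sketch of the pattern used by the new fromQuery, with the demo class invented for this example and the same PREFIX-with-keyword query that defeated the old heuristic:

import org.openrdf.query.parser.sparql.ast.ASTQuery;
import org.openrdf.query.parser.sparql.ast.ASTQueryContainer;
import org.openrdf.query.parser.sparql.ast.ASTSelectQuery;
import org.openrdf.query.parser.sparql.ast.ParseException;
import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder;

public class ParseBasedTypeDemo {

    public static void main(final String[] args) throws ParseException {

        // Parsing the query makes classification immune to keywords that
        // merely appear inside PREFIX declarations or URIs.
        final ASTQueryContainer qc = SyntaxTreeBuilder.parseQuery(
                "prefix foo: <http://example.org/ask/ns> select * where {?s ?p ?o}");

        final ASTQuery query = qc.getQuery();

        // true: the AST reports a SELECT, not an ASK.
        System.out.println(query instanceof ASTSelectQuery);

    }

}

The trade-off, as the log message notes, is one more parse per query; the final revision below removes that overhead by reporting the query type out of the single parser invocation.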
From: <tho...@us...> - 2011-06-20 21:36:46
Revision: 4750 http://bigdata.svn.sourceforge.net/bigdata/?rev=4750&view=rev Author: thompsonbry Date: 2011-06-20 21:36:37 +0000 (Mon, 20 Jun 2011) Log Message: ----------- There is now only a single parser invocation per NanoSparqlServer request. In order to achieve this I had to bundle a modified version of the openrdf SPARQLParser, which is called "BigdataSPARQLParser". The modified version extracts the query hints and the query type (ASK, CONSTRUCT, DESCRIBE, or SELECT) and reports them out via an interface which is implemented by the returned ParsedQuery (IBigdataParsedQuery). The logic hacked into the BigdataSailRepositoryConnection (for query hints) and into QueryType (for SELECT, ASK, DESCRIBE or CONSTRUCT) has been removed. The NanoSparqlServer was also modified to report a BAD_REQUEST if there is an error in the SPARQL and to include the text of the syntax error message in the response line. I'm not sure whether we should register the BigdataSPARQLParser over the SPARQLParser. The bigdata variant is used by the NanoSparqlServer and the BigdataSailRepositoryConnection. It is pretty much a requirement for setting up a bigdata sail query, but there might not be any reason to explicitly override the existing SPARQL parser registration. See https://sourceforge.net/apps/trac/bigdata/ticket/336 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestAll.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryType.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedBooleanQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedGraphQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedTupleQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/IBigdataParsedQuery.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BigdataSPARQLParser.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BlankNodeVarProcessor.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/GraphPattern.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/TupleExprBuilder.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/WildcardProjectionProcessor.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/package.html branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHints.java Removed Paths: ------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java
branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestQueryHintsUtility.java Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedBooleanQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedBooleanQuery.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedBooleanQuery.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,72 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.Properties; + +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.parser.ParsedBooleanQuery; + +/** + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class BigdataParsedBooleanQuery extends ParsedBooleanQuery implements + IBigdataParsedQuery { + + private final QueryType queryType; + private final Properties queryHints; + + /** + * @param tupleExpr + */ + public BigdataParsedBooleanQuery(final TupleExpr tupleExpr, + final QueryType queryType, final Properties queryHints) { + + super(tupleExpr); + + this.queryType = queryType; + + this.queryHints = queryHints; + + } + + public QueryType getQueryType() { + + return queryType; + + } + + public Properties getQueryHints() { + + return queryHints; + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedBooleanQuery.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedGraphQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedGraphQuery.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedGraphQuery.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,79 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.Map; +import java.util.Properties; + +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.parser.ParsedGraphQuery; + +/** + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class BigdataParsedGraphQuery extends ParsedGraphQuery implements + IBigdataParsedQuery { + + private final QueryType queryType; + private final Properties queryHints; + + /** + * @param tupleExpr + * A tuple expression representing the query, formulated in Sail Query + * Model objects. + * @param namespaces + * A mapping of namespace prefixes to namespace names representing the + * namespaces that are used in the query. + */ + public BigdataParsedGraphQuery(final TupleExpr tupleExpr, + final Map<String, String> namespaces, final QueryType queryType, + final Properties queryHints) { + + super(tupleExpr, namespaces); + + this.queryType = queryType; + + this.queryHints = queryHints; + + } + + public QueryType getQueryType() { + + return queryType; + + } + + public Properties getQueryHints() { + + return queryHints; + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedGraphQuery.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedTupleQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedTupleQuery.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedTupleQuery.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,73 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.Properties; + +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.parser.ParsedTupleQuery; + +/** + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class BigdataParsedTupleQuery extends ParsedTupleQuery implements + IBigdataParsedQuery { + + private final QueryType queryType; + private final Properties queryHints; + + /** + * @param tupleExpr + */ + public BigdataParsedTupleQuery(final TupleExpr tupleExpr, + final QueryType queryType, + final Properties queryHints) { + + super(tupleExpr); + + this.queryType = queryType; + + this.queryHints = queryHints; + + } + + public QueryType getQueryType() { + + return queryType; + + } + + public Properties getQueryHints() { + + return queryHints; + + } + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataParsedTupleQuery.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java 2011-06-20 20:43:47 UTC (rev 4749) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailQuery.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -32,6 +32,8 @@ * Extension API for bigdata queries. * * @author <a href="mailto:mrp...@us...">Mike Personick</a> + * + * @see IBigdataParsedQuery */ public interface BigdataSailQuery { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-06-20 20:43:47 UTC (rev 4749) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailRepositoryConnection.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -19,6 +19,7 @@ import com.bigdata.rdf.changesets.IChangeLog; import com.bigdata.rdf.changesets.IChangeRecord; import com.bigdata.rdf.sail.BigdataSail.BigdataSailConnection; +import com.bigdata.rdf.sail.sparql.BigdataSPARQLParser; import com.bigdata.rdf.store.AbstractTripleStore; /** @@ -72,18 +73,29 @@ final String qs, final String baseURI) throws MalformedQueryException { - final ParsedGraphQuery parsedQuery = QueryParserUtil.parseGraphQuery( - ql, qs, baseURI); + final SailQuery sailQuery = prepareQuery(ql, qs, baseURI); + + if(sailQuery.getParsedQuery() instanceof ParsedGraphQuery) { + + return (BigdataSailGraphQuery) sailQuery; + + } - final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, qs, - baseURI); + // Wrong type of query. 
+ throw new IllegalArgumentException(); + +// final ParsedGraphQuery parsedQuery = QueryParserUtil.parseGraphQuery( +// ql, qs, baseURI); +// +// final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, qs, +// baseURI); +// +// final boolean describe = ql == QueryLanguage.SPARQL +// && QueryType.fromQuery(qs) == QueryType.DESCRIBE; +// +// return new BigdataSailGraphQuery(parsedQuery, this, queryHints, +// describe); - final boolean describe = ql == QueryLanguage.SPARQL - && QueryType.fromQuery(qs) == QueryType.DESCRIBE; - - return new BigdataSailGraphQuery(parsedQuery, this, queryHints, - describe); - } /** @@ -95,16 +107,27 @@ */ @Override public BigdataSailTupleQuery prepareTupleQuery(final QueryLanguage ql, - final String queryString, final String baseURI) + final String qs, final String baseURI) throws MalformedQueryException { - final ParsedTupleQuery parsedQuery = QueryParserUtil.parseTupleQuery( - ql, queryString, baseURI); + final SailQuery sailQuery = prepareQuery(ql, qs, baseURI); + + if(sailQuery.getParsedQuery() instanceof ParsedTupleQuery) { + + return (BigdataSailTupleQuery) sailQuery; + + } - final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, - queryString, baseURI); + // Wrong type of query. + throw new IllegalArgumentException(); - return new BigdataSailTupleQuery(parsedQuery, this, queryHints); +// final ParsedTupleQuery parsedQuery = QueryParserUtil.parseTupleQuery( +// ql, queryString, baseURI); +// +// final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, +// queryString, baseURI); +// +// return new BigdataSailTupleQuery(parsedQuery, this, queryHints); } @@ -117,36 +140,65 @@ */ @Override public BigdataSailBooleanQuery prepareBooleanQuery(final QueryLanguage ql, - final String queryString, final String baseURI) + final String qs, final String baseURI) throws MalformedQueryException { - final ParsedBooleanQuery parsedQuery = QueryParserUtil - .parseBooleanQuery(ql, queryString, baseURI); + final SailQuery sailQuery = prepareQuery(ql, qs, baseURI); + + if(sailQuery.getParsedQuery() instanceof ParsedBooleanQuery) { + + return (BigdataSailBooleanQuery) sailQuery; + + } - final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, - queryString, baseURI); + // Wrong type of query. + throw new IllegalArgumentException(); - return new BigdataSailBooleanQuery(parsedQuery, this, queryHints); +// final ParsedBooleanQuery parsedQuery = QueryParserUtil +// .parseBooleanQuery(ql, queryString, baseURI); +// +// final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, +// queryString, baseURI); +// +// return new BigdataSailBooleanQuery(parsedQuery, this, queryHints); } - /** - * {@inheritDoc} - * <p> - * Overridden to capture query hints from SPARQL queries. Query hints are - * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} - * for more information. - */ + /** + * {@inheritDoc} + * <p> + * Overridden to capture query hints from SPARQL queries. Query hints are + * embedded in query strings as namespaces. See {@link QueryHints#PREFIX} + * for more information. + * <p> + * Note: In order to ensure that all code paths capture this information, + * all the other "prepare query" methods on this class delegate to this + * implementation. 
+ */ @Override public SailQuery prepareQuery(final QueryLanguage ql, final String qs, final String baseURI) throws MalformedQueryException { - final ParsedQuery parsedQuery = QueryParserUtil.parseQuery(ql, qs, - baseURI); + final ParsedQuery parsedQuery; + final Properties queryHints; + final boolean describe; + if(QueryLanguage.SPARQL == ql) { + /* + * Make sure that we go through the overridden SPARQL parser. + */ + parsedQuery = new BigdataSPARQLParser().parseQuery(qs, baseURI); + queryHints = ((IBigdataParsedQuery) parsedQuery).getQueryHints(); + describe = QueryType.DESCRIBE == ((IBigdataParsedQuery) parsedQuery) + .getQueryType(); + } else { + /* + * Not a SPARQL query. + */ + parsedQuery = QueryParserUtil.parseQuery(ql, qs, baseURI); + queryHints = new Properties(); + describe = false; + } - final Properties queryHints = QueryHintsUtility.parseQueryHints(ql, qs, - baseURI); - if (parsedQuery instanceof ParsedTupleQuery) { return new BigdataSailTupleQuery((ParsedTupleQuery) parsedQuery, @@ -154,9 +206,6 @@ } else if (parsedQuery instanceof ParsedGraphQuery) { - final boolean describe = ql == QueryLanguage.SPARQL - && QueryType.fromQuery(qs) == QueryType.DESCRIBE; - return new BigdataSailGraphQuery((ParsedGraphQuery) parsedQuery, this, queryHints, describe); Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/IBigdataParsedQuery.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/IBigdataParsedQuery.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/IBigdataParsedQuery.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,58 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Jun 20, 2011 + */ + +package com.bigdata.rdf.sail; + +import java.util.Properties; + +/** + * Interface providing access to more state of the original SPARQL query AST. + * <p> + * Note: This interface is supported by various overrides of the openrdf SPARQL + * parser. Those overrides are required in order to gain access to the details + * of the AST. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + * + * @see BigdataSailQuery + */ +public interface IBigdataParsedQuery { + + /** + * The type of query. + */ + QueryType getQueryType(); + + /** + * Return query hints associated with this query. Query hints are embedded + * in query strings as namespaces. See {@link QueryHints#PREFIX} for more + * information. 
+ */ + Properties getQueryHints(); + +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/IBigdataParsedQuery.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Deleted: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java 2011-06-20 20:43:47 UTC (rev 4749) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryHintsUtility.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -1,143 +0,0 @@ -/** - -Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -/* -Portions of this code are: - -Copyright Aduna (http://www.aduna-software.com/) (c) 2001-2007 - -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the copyright holder nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -*/ -/* - * Created on Jun 20, 2011 - */ - -package com.bigdata.rdf.sail; - -import java.util.Map; -import java.util.Properties; -import java.util.StringTokenizer; - -import org.openrdf.query.MalformedQueryException; -import org.openrdf.query.QueryLanguage; -import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; -import org.openrdf.query.parser.sparql.ast.ParseException; -import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder; - -import com.bigdata.rdf.sail.sparql.BaseDeclProcessor; -import com.bigdata.rdf.sail.sparql.PrefixDeclProcessor; -import com.bigdata.rdf.sail.sparql.StringEscapesProcessor; - -/** - * A utility class for parsing {@link QueryHints}. - * - * @author <a href="mailto:mrp...@us...">Mike Personick</a> - * @version $Id$ - */ -public class QueryHintsUtility { - - /** - * Parse query hints from a query string. Query hints are embedded in the - * query string via special namespaces. - * <p> - * Note: The Sesame operator tree does not include the original query hints, - * which is why this method is not written against the operator tree. - * - * See {@link QueryHints#PREFIX} for more information. - */ - public static Properties parseQueryHints(final QueryLanguage ql, - final String queryString, final String baseURI) - throws MalformedQueryException { - try { - final Properties queryHints = new Properties(); - // currently only supporting SPARQL - if (ql == QueryLanguage.SPARQL) { - // the next four lines were taken directly from - // org.openrdf.query.parser.sparql.SPARQLParser.parseQuery(String queryStr, String baseURI) - final ASTQueryContainer qc = SyntaxTreeBuilder - .parseQuery(queryString); - StringEscapesProcessor.process(qc); - BaseDeclProcessor.process(qc, baseURI); - final Map<String, String> prefixes = PrefixDeclProcessor - .process(qc); - // iterate the namespaces - for (Map.Entry<String, String> prefix : prefixes.entrySet()) { - // if we see one that matches the magic namespace, try - // to parse it - if (prefix.getKey().equalsIgnoreCase(QueryHints.PREFIX)) { - String hints = prefix.getValue(); - // has to have a # and it can't be at the end - int i = hints.indexOf('#'); - if (i < 0 || i == hints.length() - 1) { - throw new MalformedQueryException( - "bad query hints: " + hints); - } - hints = hints.substring(i + 1); - // properties are separated by & - final StringTokenizer st = new StringTokenizer(hints, - "&"); - while (st.hasMoreTokens()) { - final String hint = st.nextToken(); - i = hint.indexOf('='); - if (i < 0 || i == hint.length() - 1) { - throw new MalformedQueryException( - "bad query hint: " + hint); - } - final String key = hint.substring(0, i); - final String val = hint.substring(i+1); - queryHints.put(key, val); - } - } - } - } - return queryHints; - } catch (ParseException e) { - throw new MalformedQueryException(e.getMessage(), e); - } - } - -} Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java 2011-06-20 20:43:47 UTC (rev 4749) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/QueryType.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -1,15 +1,5 @@ package com.bigdata.rdf.sail; -import org.openrdf.query.parser.sparql.ast.ASTAskQuery; -import org.openrdf.query.parser.sparql.ast.ASTConstructQuery; -import org.openrdf.query.parser.sparql.ast.ASTDescribeQuery; -import org.openrdf.query.parser.sparql.ast.ASTQuery; 
-import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; -import org.openrdf.query.parser.sparql.ast.ASTSelectQuery; -import org.openrdf.query.parser.sparql.ast.ParseException; -import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder; -import org.openrdf.query.parser.sparql.ast.TokenMgrError; - /** * Helper class to figure out the type of a query. */ @@ -21,32 +11,36 @@ } - /** - * Hack returns the query type based on the first occurrence of the - * keyword for any known query type in the query. - * - * @param queryStr - * The query. - * - * @return The query type. - */ - static public QueryType fromQuery(final String queryStr) { - - try { - final ASTQueryContainer queryContainer = SyntaxTreeBuilder - .parseQuery(queryStr); - final ASTQuery query = queryContainer.getQuery(); - if(query instanceof ASTSelectQuery) return QueryType.SELECT; - if(query instanceof ASTDescribeQuery) return QueryType.DESCRIBE; - if(query instanceof ASTConstructQuery) return QueryType.CONSTRUCT; - if(query instanceof ASTAskQuery) return QueryType.ASK; - throw new RuntimeException(queryContainer.toString()); - } catch (TokenMgrError ex) { - throw new RuntimeException(ex); - } catch (ParseException ex) { - throw new RuntimeException(ex); - } - - } +// /** +// * Hack returns the query type based on the first occurrence of the keyword +// * for any known query type in the query. +// * +// * @param queryStr +// * The query. +// * +// * @return The query type. +// * +// * @deprecated by {@link BigdataSPARQLParser#parseQuery(String, String)} +// * which makes this information available as metadata via the +// * {@link IBigdataParsedQuery} interface. +// */ +// static public QueryType fromQuery(final String queryStr) { +// +// try { +// final ASTQueryContainer queryContainer = SyntaxTreeBuilder +// .parseQuery(queryStr); +// final ASTQuery query = queryContainer.getQuery(); +// if(query instanceof ASTSelectQuery) return QueryType.SELECT; +// if(query instanceof ASTDescribeQuery) return QueryType.DESCRIBE; +// if(query instanceof ASTConstructQuery) return QueryType.CONSTRUCT; +// if(query instanceof ASTAskQuery) return QueryType.ASK; +// throw new RuntimeException(queryContainer.toString()); +// } catch (TokenMgrError ex) { +// throw new RuntimeException(ex); +// } catch (ParseException ex) { +// throw new RuntimeException(ex); +// } +// +// } } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java 2011-06-20 20:43:47 UTC (rev 4749) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/NanoSparqlClient.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -65,7 +65,9 @@ import com.bigdata.counters.CAT; import com.bigdata.jsr166.LinkedBlockingQueue; +import com.bigdata.rdf.sail.IBigdataParsedQuery; import com.bigdata.rdf.sail.QueryType; +import com.bigdata.rdf.sail.sparql.BigdataSPARQLParser; /** * A flyweight utility for issuing queries to an http SPARQL endpoint. @@ -270,12 +272,15 @@ conn.setUseCaches(opts.useCaches); conn.setReadTimeout(opts.timeout); - /* - * Set an appropriate Accept header for the query. - */ - final QueryType queryType = opts.queryType = QueryType - .fromQuery(opts.queryStr); + /* + * Set an appropriate Accept header for the query. 
+ */ + final ParsedQuery parsedQuery = new BigdataSPARQLParser() + .parseQuery(opts.queryStr, opts.baseURI); + final QueryType queryType = opts.queryType = ((IBigdataParsedQuery) parsedQuery) + .getQueryType(); + switch(queryType) { case DESCRIBE: case CONSTRUCT: Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BigdataSPARQLParser.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BigdataSPARQLParser.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BigdataSPARQLParser.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,201 @@ +/* + * Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2007. + * + * Licensed under the Aduna BSD-style license. + */ +package com.bigdata.rdf.sail.sparql; + +import java.util.Map; +import java.util.Properties; +import java.util.StringTokenizer; + +import org.openrdf.model.impl.ValueFactoryImpl; +import org.openrdf.query.Dataset; +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.parser.ParsedQuery; +import org.openrdf.query.parser.QueryParser; +import org.openrdf.query.parser.sparql.DatasetDeclProcessor; +import org.openrdf.query.parser.sparql.SPARQLParser; +import org.openrdf.query.parser.sparql.ast.ASTAskQuery; +import org.openrdf.query.parser.sparql.ast.ASTConstructQuery; +import org.openrdf.query.parser.sparql.ast.ASTDescribeQuery; +import org.openrdf.query.parser.sparql.ast.ASTQuery; +import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; +import org.openrdf.query.parser.sparql.ast.ASTSelectQuery; +import org.openrdf.query.parser.sparql.ast.ParseException; +import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilder; +import org.openrdf.query.parser.sparql.ast.TokenMgrError; +import org.openrdf.query.parser.sparql.ast.VisitorException; + +import com.bigdata.rdf.sail.BigdataParsedBooleanQuery; +import com.bigdata.rdf.sail.BigdataParsedGraphQuery; +import com.bigdata.rdf.sail.BigdataParsedTupleQuery; +import com.bigdata.rdf.sail.IBigdataParsedQuery; +import com.bigdata.rdf.sail.QueryHints; +import com.bigdata.rdf.sail.QueryType; + +/** + * Overridden version of the openrdf 2.3 {@link SPARQLParser} class which + * extracts additional information required by bigdata and associates it with + * the {@link ParsedQuery}. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + * @version $Id$ + */ +public class BigdataSPARQLParser implements QueryParser { + + /** + * {@inheritDoc} + * + * @return An object which implements {@link IBigdataParsedQuery}. + * Additional information is available by casting the returned + * object to that interface. + */ + public ParsedQuery parseQuery(String queryStr, String baseURI) + throws MalformedQueryException + { + try { + ASTQueryContainer qc = SyntaxTreeBuilder.parseQuery(queryStr); + StringEscapesProcessor.process(qc); + BaseDeclProcessor.process(qc, baseURI); + Map<String, String> prefixes = PrefixDeclProcessor.process(qc); + WildcardProjectionProcessor.process(qc); + BlankNodeVarProcessor.process(qc); + TupleExpr tupleExpr = buildQueryModel(qc); + + ParsedQuery query; + + // Note: Bigdata override. + final Properties queryHints = getQueryHints(qc); + + // Note: The constructors in the if-then-else below are overridden too. 
+ ASTQuery queryNode = qc.getQuery(); + if (queryNode instanceof ASTSelectQuery) { + query = new BigdataParsedTupleQuery(tupleExpr, + QueryType.SELECT, queryHints); + } + else if (queryNode instanceof ASTConstructQuery) { + query = new BigdataParsedGraphQuery(tupleExpr, prefixes, + QueryType.CONSTRUCT, queryHints); + } + else if (queryNode instanceof ASTAskQuery) { + query = new BigdataParsedBooleanQuery(tupleExpr, QueryType.ASK, + queryHints); + } + else if (queryNode instanceof ASTDescribeQuery) { + query = new BigdataParsedGraphQuery(tupleExpr, prefixes, + QueryType.DESCRIBE, queryHints); + } + else { + throw new RuntimeException("Unexpected query type: " + queryNode.getClass()); + } + + // Handle dataset declaration + Dataset dataset = DatasetDeclProcessor.process(qc); + if (dataset != null) { + query.setDataset(dataset); + } + + return query; + } + catch (ParseException e) { + throw new MalformedQueryException(e.getMessage(), e); + } + catch (TokenMgrError e) { + throw new MalformedQueryException(e.getMessage(), e); + } + } + + private TupleExpr buildQueryModel(ASTQueryContainer qc) + throws MalformedQueryException + { + TupleExprBuilder tupleExprBuilder = new TupleExprBuilder(new ValueFactoryImpl()); + try { + return (TupleExpr)qc.jjtAccept(tupleExprBuilder, null); + } + catch (VisitorException e) { + throw new MalformedQueryException(e.getMessage(), e); + } + } + + static private Properties getQueryHints(final ASTQueryContainer qc) + throws MalformedQueryException { +// try { + final Properties queryHints = new Properties(); +// // currently only supporting SPARQL +// if (ql == QueryLanguage.SPARQL) { +// // the next four lines were taken directly from +// // org.openrdf.query.parser.sparql.SPARQLParser.parseQuery(String queryStr, String baseURI) +// final ASTQueryContainer qc = SyntaxTreeBuilder +// .parseQuery(queryString); +// StringEscapesProcessor.process(qc); +// BaseDeclProcessor.process(qc, baseURI); + final Map<String, String> prefixes = PrefixDeclProcessor + .process(qc); + // iterate the namespaces + for (Map.Entry<String, String> prefix : prefixes.entrySet()) { + // if we see one that matches the magic namespace, try + // to parse it + if (prefix.getKey().equalsIgnoreCase(QueryHints.PREFIX)) { + String hints = prefix.getValue(); + // has to have a # and it can't be at the end + int i = hints.indexOf('#'); + if (i < 0 || i == hints.length() - 1) { + throw new MalformedQueryException( + "bad query hints: " + hints); + } + hints = hints.substring(i + 1); + // properties are separated by & + final StringTokenizer st = new StringTokenizer(hints, + "&"); + while (st.hasMoreTokens()) { + final String hint = st.nextToken(); + i = hint.indexOf('='); + if (i < 0 || i == hint.length() - 1) { + throw new MalformedQueryException( + "bad query hint: " + hint); + } + final String key = hint.substring(0, i); + final String val = hint.substring(i+1); + queryHints.put(key, val); + } + } + } +// } + return queryHints; +// } catch (ParseException e) { +// throw new MalformedQueryException(e.getMessage(), e); +// } + } + +// public static void main(String[] args) +// throws java.io.IOException +// { +// System.out.println("Your SPARQL query:"); +// +// BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); +// +// StringBuilder buf = new StringBuilder(); +// String line = null; +// while ((line = in.readLine()) != null) { +// if (line.length() > 0) { +// buf.append(' ').append(line).append('\n'); +// } +// else { +// String queryStr = buf.toString().trim(); +// if 
(queryStr.length() > 0) { +// try { +// SPARQLParser parser = new SPARQLParser(); +// parser.parseQuery(queryStr, null); +// } +// catch (Exception e) { +// System.err.println(e.getMessage()); +// e.printStackTrace(); +// } +// } +// buf.setLength(0); +// } +// } +// } +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BigdataSPARQLParser.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BlankNodeVarProcessor.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BlankNodeVarProcessor.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BlankNodeVarProcessor.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,121 @@ +/* + * Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2006. + * + * Licensed under the Aduna BSD-style license. + */ +package com.bigdata.rdf.sail.sparql; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.openrdf.query.MalformedQueryException; +import org.openrdf.query.parser.sparql.ast.ASTBasicGraphPattern; +import org.openrdf.query.parser.sparql.ast.ASTBlankNode; +import org.openrdf.query.parser.sparql.ast.ASTBlankNodePropertyList; +import org.openrdf.query.parser.sparql.ast.ASTCollection; +import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; +import org.openrdf.query.parser.sparql.ast.ASTVar; +import org.openrdf.query.parser.sparql.ast.SyntaxTreeBuilderTreeConstants; +import org.openrdf.query.parser.sparql.ast.VisitorException; + +/** + * Processes blank nodes in the query body, replacing them with variables while + * retaining scope. 
+ * + * @author Arjohn Kampman + */ +public class BlankNodeVarProcessor extends ASTVisitorBase { + + public static void process(ASTQueryContainer qc) + throws MalformedQueryException + { + try { + qc.jjtAccept(new BlankNodeToVarConverter(), null); + } + catch (VisitorException e) { + throw new MalformedQueryException(e); + } + } + + /*-------------------------------------* + * Inner class BlankNodeToVarConverter * + *-------------------------------------*/ + + private static class BlankNodeToVarConverter extends ASTVisitorBase { + + private int anonVarNo = 1; + + private Map<String, String> conversionMap = new HashMap<String, String>(); + + private Set<String> usedBNodeIDs = new HashSet<String>(); + + private String createAnonVarName() { + return "-anon-" + anonVarNo++; + } + + @Override + public Object visit(ASTBasicGraphPattern node, Object data) + throws VisitorException + { + // The same Blank node ID cannot be used across Graph Patterns + usedBNodeIDs.addAll(conversionMap.keySet()); + + // Blank nodes are scoped to Basic Graph Patterns + conversionMap.clear(); + + return super.visit(node, data); + } + + @Override + public Object visit(ASTBlankNode node, Object data) + throws VisitorException + { + String bnodeID = node.getID(); + String varName = findVarName(bnodeID); + + if (varName == null) { + varName = createAnonVarName(); + + if (bnodeID != null) { + conversionMap.put(bnodeID, varName); + } + } + + ASTVar varNode = new ASTVar(SyntaxTreeBuilderTreeConstants.JJTVAR); + varNode.setName(varName); + varNode.setAnonymous(true); + + node.jjtReplaceWith(varNode); + + return super.visit(node, data); + } + + private String findVarName(String bnodeID) throws VisitorException { + if (bnodeID == null) + return null; + String varName = conversionMap.get(bnodeID); + if (varName == null && usedBNodeIDs.contains(bnodeID)) + throw new VisitorException( + "BNodeID already used in another scope: " + bnodeID); + return varName; + } + + @Override + public Object visit(ASTBlankNodePropertyList node, Object data) + throws VisitorException + { + node.setVarName(createAnonVarName()); + return super.visit(node, data); + } + + @Override + public Object visit(ASTCollection node, Object data) + throws VisitorException + { + node.setVarName(createAnonVarName()); + return super.visit(node, data); + } + } +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/BlankNodeVarProcessor.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/GraphPattern.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/GraphPattern.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/GraphPattern.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,163 @@ +/* + * Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2007. + * + * Licensed under the Aduna BSD-style license. 
+ */ +package com.bigdata.rdf.sail.sparql; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.openrdf.query.algebra.Join; +import org.openrdf.query.algebra.LeftJoin; +import org.openrdf.query.algebra.Filter; +import org.openrdf.query.algebra.SingletonSet; +import org.openrdf.query.algebra.StatementPattern; +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.algebra.ValueExpr; +import org.openrdf.query.algebra.Var; + +/** + * A graph pattern consisting of (required and optional) tuple expressions and + * boolean constraints. + * + * @author Arjohn Kampman + */ +public class GraphPattern { + + /** + * The context of this graph pattern. + */ + private Var contextVar; + + /** + * The StatementPattern-scope of this graph pattern. + */ + private StatementPattern.Scope spScope = StatementPattern.Scope.DEFAULT_CONTEXTS; + + /** + * The required tuple expressions in this graph pattern. + */ + private List<TupleExpr> requiredTEs = new ArrayList<TupleExpr>(); + + /** + * The optional tuple expressions in this graph pattern. + */ + private List<TupleExpr> optionalTEs = new ArrayList<TupleExpr>(); + + /** + * The boolean constraints in this graph pattern. + */ + private List<ValueExpr> constraints = new ArrayList<ValueExpr>(); + + /** + * Creates a new graph pattern. + */ + public GraphPattern() { + } + + /** + * Creates a new graph pattern that inherits the context and scope from a + * parent graph pattern. + */ + public GraphPattern(GraphPattern parent) { + contextVar = parent.contextVar; + spScope = parent.spScope; + } + + public void setContextVar(Var contextVar) { + this.contextVar = contextVar; + } + + public Var getContextVar() { + return contextVar; + } + + public void setStatementPatternScope(StatementPattern.Scope spScope) { + this.spScope = spScope; + } + + public StatementPattern.Scope getStatementPatternScope() { + return spScope; + } + + public void addRequiredTE(TupleExpr te) { + requiredTEs.add(te); + } + + public void addRequiredSP(Var subjVar, Var predVar, Var objVar) { + addRequiredTE(new StatementPattern(spScope, subjVar, predVar, objVar, contextVar)); + } + + public List<TupleExpr> getRequiredTEs() { + return Collections.unmodifiableList(requiredTEs); + } + + public void addOptionalTE(TupleExpr te) { + optionalTEs.add(te); + } + + public List<TupleExpr> getOptionalTEs() { + return Collections.unmodifiableList(optionalTEs); + } + + public void addConstraint(ValueExpr constraint) { + constraints.add(constraint); + } + + public void addConstraints(Collection<ValueExpr> constraints) { + this.constraints.addAll(constraints); + } + + public List<ValueExpr> getConstraints() { + return Collections.unmodifiableList(constraints); + } + + public List<ValueExpr> removeAllConstraints() { + List<ValueExpr> constraints = this.constraints; + this.constraints = new ArrayList<ValueExpr>(); + return constraints; + } + + /** + * Removes all tuple expressions and constraints. + */ + public void clear() { + requiredTEs.clear(); + optionalTEs.clear(); + constraints.clear(); + } + + /** + * Builds a combined tuple expression from the tuple expressions and + * constraints in this graph pattern. + * + * @return A tuple expression for this graph pattern. 
+ */ + public TupleExpr buildTupleExpr() { + TupleExpr result; + + if (requiredTEs.isEmpty()) { + result = new SingletonSet(); + } + else { + result = requiredTEs.get(0); + + for (int i = 1; i < requiredTEs.size(); i++) { + result = new Join(result, requiredTEs.get(i)); + } + } + + for (TupleExpr optTE : optionalTEs) { + result = new LeftJoin(result, optTE); + } + + for (ValueExpr constraint : constraints) { + result = new Filter(result, constraint); + } + + return result; + } +} Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/GraphPattern.java ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/TupleExprBuilder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/TupleExprBuilder.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/TupleExprBuilder.java 2011-06-20 21:36:37 UTC (rev 4750) @@ -0,0 +1,924 @@ +/* + * Copyright Aduna (http://www.aduna-software.com/) (c) 1997-2007. + * + * Licensed under the Aduna BSD-style license. + */ +package com.bigdata.rdf.sail.sparql; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.query.algebra.And; +import org.openrdf.query.algebra.BNodeGenerator; +import org.openrdf.query.algebra.Bound; +import org.openrdf.query.algebra.Compare; +import org.openrdf.query.algebra.Datatype; +import org.openrdf.query.algebra.Distinct; +import org.openrdf.query.algebra.EmptySet; +import org.openrdf.query.algebra.Extension; +import org.openrdf.query.algebra.ExtensionElem; +import org.openrdf.query.algebra.Filter; +import org.openrdf.query.algebra.FunctionCall; +import org.openrdf.query.algebra.IsBNode; +import org.openrdf.query.algebra.IsLiteral; +import org.openrdf.query.algebra.IsURI; +import org.openrdf.query.algebra.Join; +import org.openrdf.query.algebra.Lang; +import org.openrdf.query.algebra.LangMatches; +import org.openrdf.query.algebra.LeftJoin; +import org.openrdf.query.algebra.MathExpr; +import org.openrdf.query.algebra.MultiProjection; +import org.openrdf.query.algebra.Not; +import org.openrdf.query.algebra.Or; +import org.openrdf.query.algebra.Order; +import org.openrdf.query.algebra.OrderElem; +import org.openrdf.query.algebra.Projection; +import org.openrdf.query.algebra.ProjectionElem; +import org.openrdf.query.algebra.ProjectionElemList; +import org.openrdf.query.algebra.Reduced; +import org.openrdf.query.algebra.Regex; +import org.openrdf.query.algebra.SameTerm; +import org.openrdf.query.algebra.Slice; +import org.openrdf.query.algebra.StatementPattern; +import org.openrdf.query.algebra.Str; +import org.openrdf.query.algebra.TupleExpr; +import org.openrdf.query.algebra.Union; +import org.openrdf.query.algebra.ValueConstant; +import org.openrdf.query.algebra.ValueExpr; +import org.openrdf.query.algebra.Var; +import org.openrdf.query.algebra.StatementPattern.Scope; +import org.openrdf.query.algebra.helpers.StatementPatternCollector; +import 
org.openrdf.query.parser.sparql.ast.ASTAnd; +import org.openrdf.query.parser.sparql.ast.ASTAskQuery; +import org.openrdf.query.parser.sparql.ast.ASTBlankNode; +import org.openrdf.query.parser.sparql.ast.ASTBlankNodePropertyList; +import org.openrdf.query.parser.sparql.ast.ASTBound; +import org.openrdf.query.parser.sparql.ast.ASTCollection; +import org.openrdf.query.parser.sparql.ast.ASTCompare; +import org.openrdf.query.parser.sparql.ast.ASTConstraint; +import org.openrdf.query.parser.sparql.ast.ASTConstruct; +import org.openrdf.query.parser.sparql.ast.ASTConstructQuery; +import org.openrdf.query.parser.sparql.ast.ASTDatatype; +import org.openrdf.query.parser.sparql.ast.ASTDescribe; +import org.openrdf.query.parser.sparql.ast.ASTDescribeQuery; +import org.openrdf.query.parser.sparql.ast.ASTFalse; +import org.openrdf.query.parser.sparql.ast.ASTFunctionCall; +import org.openrdf.query.parser.sparql.ast.ASTGraphGraphPattern; +import org.openrdf.query.parser.sparql.ast.ASTGraphPatternGroup; +import org.openrdf.query.parser.sparql.ast.ASTIRI; +import org.openrdf.query.parser.sparql.ast.ASTIsBlank; +import org.openrdf.query.parser.sparql.ast.ASTIsIRI; +import org.openrdf.query.parser.sparql.ast.ASTIsLiteral; +import org.openrdf.query.parser.sparql.ast.ASTLang; +import org.openrdf.query.parser.sparql.ast.ASTLangMatches; +import org.openrdf.query.parser.sparql.ast.ASTLimit; +import org.openrdf.query.parser.sparql.ast.ASTMath; +import org.openrdf.query.parser.sparql.ast.ASTNot; +import org.openrdf.query.parser.sparql.ast.ASTNumericLiteral; +import org.openrdf.query.parser.sparql.ast.ASTObjectList; +import org.openrdf.query.parser.sparql.ast.ASTOffset; +import org.openrdf.query.parser.sparql.ast.ASTOptionalGraphPattern; +import org.openrdf.query.parser.sparql.ast.ASTOr; +import org.openrdf.query.parser.sparql.ast.ASTOrderClause; +import org.openrdf.query.parser.sparql.ast.ASTOrderCondition; +import org.openrdf.query.parser.sparql.ast.ASTPropertyList; +import org.openrdf.query.parser.sparql.ast.ASTQName; +import org.openrdf.query.parser.sparql.ast.ASTQueryContainer; +import org.openrdf.query.parser.sparql.ast.ASTRDFLiteral; +import org.openrdf.query.parser.sparql.ast.ASTRegexExpression; +import org.openrdf.query.parser.sparql.ast.ASTSameTerm; +import org.openrdf.query.parser.sparql.ast.ASTSelect; +import org.openrdf.query.parser.sparql.ast.ASTSelectQuery; +import org.openrdf.query.parser.sparql.ast.ASTStr; +import org.openrdf.query.parser.sparql.ast.ASTString; +import org.openrdf.query.parser.sparql.ast.ASTTrue; +import org.openrdf.query.parser.sparql.ast.ASTUnionGraphPattern; +import org.openrdf.query.parser.sparql.ast.ASTVar; +import org.openrdf.query.parser.sparql.ast.Node; +import org.openrdf.query.parser.sparql.ast.VisitorException; + +/** + * @author Arjohn Kampman + */ +public class TupleExprBuilder extends ASTVisitorBase { + + /*-----------* + * Variables * + *-----------*/ + + private ValueFactory valueFactory; + + private GraphPattern graphPattern; + + private int constantVarID = 1; + + /*--------------* + * Constructors * + *--------------*/ + + public TupleExprBuilder(ValueFactory valueFactory) { + this.valueFactory = valueFactory; + } + + /*---------* + * Methods * + *---------*/ + + private Var valueExpr2Var(ValueExpr valueExpr) { + if (valueExpr instanceof Var) { + return (Var)valueExpr; + } + else if (valueExpr instanceof ValueConstant) { + return createConstVar(((ValueConstant)valueExpr).getValue()); + } + else if (valueExpr == null) { + throw new IllegalArgumentException("valueExpr 
is null"); + } + else { + throw new IllegalArgumentException("valueExpr is a: " + valueExpr.getClass()); + } + } + + private Var createConstVar(Value value) { + Var var = createAnonVar("-const-" + constantVarID++); + var.setValue(value); + return var; + } + + private Var createAnonVar(String varName) { + Var var = new Var(varName); + var.setAnonymous(true); + return var; + } + + @Override + public TupleExpr visit(ASTQueryContainer node, Object data) + throws VisitorException + { + // Skip the prolog, any information it contains should already have been + // processed + return (TupleExpr)node.getQuery().jjtAccept(this, null); + } + + @Override + public TupleExpr visit(ASTSelectQuery node, Object data) + throws VisitorException + { + // Start with building the graph pattern + graphPattern = new GraphPattern(); + node.getWhereClause().jjtAccept(this, null); + TupleExpr tupleExpr = graphPattern.buildTupleExpr(); + + // Apply result ordering + ASTOrderClause orderNode = node.getOrderClause(); + if (orderNode != null) { + List<OrderElem> orderElemements = (List<OrderElem>)orderNode.jjtAccept(this, null); + tupleExpr = new Order(tupleExpr, orderElemements); + } + + // Apply projection + tupleExpr = (TupleExpr)node.getSelect().jjtAccept(this, tupleExpr); + + // Process limit and offset clauses + ASTLimit limitNode = node.getLimit(); + int limit = -1; + if (limitNode != null) { + limit = (Integer)limitNode.jjtAccept(this, null); + } + + ASTOffset offsetNode = node.getOffset(); + int offset = -1; + if (offsetNode != null) { + offset = (Integer)offsetNode.jjtAccept(this, null); + } + + if (offset >= 1 || limit >= 0) { + tupleExpr = new Slice(tupleExpr, offset, limit); + } + + return tupleExpr; + } + + @Override + public TupleExpr visit(ASTSelect node, Object data) + throws VisitorException + { + TupleExpr result = (TupleExpr)data; + + ProjectionElemList projElemList = new ProjectionElemList(); + + for (int i = 0; i < node.jjtGetNumChildren(); i++) { + Var projVar = (Var)node.jjtGetChild(i).jjtAccept(this, null); + projElemList.addElement(new ProjectionElem(projVar.getName())); + } + + result = new Projection(result, projElemList); + + if (node.isDistinct()) { + result = new Distinct(result); + } + else if (node.isReduced()) { + result = new Reduced(result); + } + + return result; + } + + @Override + public TupleExpr visit(ASTConstructQuery node, Object data) + throws VisitorException + { + // Start wit... [truncated message content] |
From: <tho...@us...> - 2011-06-24 19:21:57
|
Revision: 4795 http://bigdata.svn.sourceforge.net/bigdata/?rev=4795&view=rev Author: thompsonbry Date: 2011-06-24 19:21:49 +0000 (Fri, 24 Jun 2011) Log Message: ----------- Bug fix: the mutation servlet code paths did not perform an explicit conn.rollback() on error paths. Added support for ACID PUT using a DESCRIBE or CONSTRUCT query in combination with a request body containing RDF data. This closes [1]. [1] https://sourceforge.net/apps/trac/bigdata/ticket/332 (ACID PUT API for NSS) Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/DeleteServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/InsertServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/UpdateServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-06-24 17:43:27 UTC (rev 4794) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java 2011-06-24 19:21:49 UTC (rev 4795) @@ -776,8 +776,18 @@ * "DELETE WITH QUERY" where this method is used in a context which writes * onto an internal pipe rather than onto the {@link HttpServletResponse}. * + * @param namespace + * The namespace associated with the {@link AbstractTripleStore} + * view. + * @param timestamp + * The timestamp associated with the {@link AbstractTripleStore} + * view. * @param queryStr * The query. + * @param acceptOverride + * Override the Accept header (optional). This is used by UPDATE + * and DELETE so they can control the {@link RDFFormat} of the + * materialized query results. * @param req * The request. * @param os @@ -791,6 +801,7 @@ final String namespace,// final long timestamp,// final String queryStr,// + final String acceptOverride,// final HttpServletRequest req,// final OutputStream os) throws MalformedQueryException { @@ -807,13 +818,15 @@ * query exactly once in order to minimize the resources associated with * the query parser. */ - final ParsedQuery parsedQuery = m_queryParser.parseQuery(queryStr, baseURI); + final ParsedQuery parsedQuery = m_queryParser.parseQuery(queryStr, + baseURI); - if(log.isDebugEnabled()) + if (log.isDebugEnabled()) log.debug(parsedQuery.toString()); - - final QueryType queryType = ((IBigdataParsedQuery) parsedQuery).getQueryType(); + final QueryType queryType = ((IBigdataParsedQuery) parsedQuery) + .getQueryType(); + /* * When true, provide an "explanation" for the query (query plan, query * evaluation statistics) rather than the results of the query. @@ -833,8 +846,8 @@ * has some stuff related to generating Accept headers in their * RDFFormat which could bear some more looking into in this regard.) */ - final String acceptStr = explain ? "text/html" : req - .getHeader("Accept"); + final String acceptStr = explain ? "text/html" + : acceptOverride != null ? 
acceptOverride : req.getHeader("Accept"); switch (queryType) { case ASK: { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/DeleteServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/DeleteServlet.java 2011-06-24 17:43:27 UTC (rev 4794) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/DeleteServlet.java 2011-06-24 19:21:49 UTC (rev 4795) @@ -98,8 +98,12 @@ final InputStream is = newPipedInputStream(os); try { + // Use this format for the query results. + final RDFFormat format = RDFFormat.NTRIPLES; + final AbstractQueryTask queryTask = getBigdataRDFContext() .getQueryTask(namespace, ITx.READ_COMMITTED, queryStr, + format.getDefaultMIMEType(), req, os); switch (queryTask.queryType) { @@ -120,21 +124,6 @@ conn = getBigdataRDFContext().getUnisolatedConnection( namespace); - /* - * TODO The RDF for the *query* will be generated using the - * MIME type negotiated based on the Accept header (if any) - * in the DELETE request. That means that we need to look at - * the Accept header here and chose the right RDFFormat for - * the parser. (The alternative is to have an alternative - * way to run the query task where we specify the MIME Type - * of the result directly. That might be better all around.) - */ - - final String contentType = req.getContentType(); - - final RDFFormat format = RDFFormat.forMIMEType(contentType, - RDFFormat.RDFXML); - final RDFParserFactory factory = RDFParserRegistry .getInstance().get(format); @@ -171,7 +160,14 @@ final long elapsed = System.currentTimeMillis() - begin; reportModifiedCount(resp, nmodified.get(), elapsed); + + } catch(Throwable t) { + if(conn != null) + conn.rollback(); + + throw new RuntimeException(t); + } finally { if (conn != null) @@ -200,15 +196,15 @@ final String contentType = req.getContentType(); - final String queryStr = req.getRequestURI(); + final String queryStr = req.getParameter("query"); - if (contentType != null) { + if (queryStr != null) { - doDeleteWithBody(req, resp); + doDeleteWithQuery(req, resp); - } else if (queryStr != null) { + } else if (contentType != null) { - doDeleteWithQuery(req, resp); + doDeleteWithBody(req, resp); } else { @@ -304,6 +300,13 @@ reportModifiedCount(resp, nmodified.get(), elapsed); + } catch(Throwable t) { + + if (conn != null) + conn.rollback(); + + throw new RuntimeException(t); + } finally { if (conn != null) @@ -323,7 +326,7 @@ /** * Helper class removes statements from the sail as they are visited by a parser. 
*/ - private static class RemoveStatementHandler extends RDFHandlerBase { + static class RemoveStatementHandler extends RDFHandlerBase { private final BigdataSailConnection conn; private final AtomicLong nmodified; @@ -337,7 +340,8 @@ } - public void handleStatement(Statement stmt) throws RDFHandlerException { + public void handleStatement(final Statement stmt) + throws RDFHandlerException { try { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/InsertServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/InsertServlet.java 2011-06-24 17:43:27 UTC (rev 4794) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/InsertServlet.java 2011-06-24 19:21:49 UTC (rev 4795) @@ -186,6 +186,13 @@ return; + } catch(Throwable t) { + + if(conn != null) + conn.rollback(); + + throw new RuntimeException(t); + } finally { if (conn != null) @@ -350,6 +357,13 @@ reportModifiedCount(resp, nmodified.get(), elapsed); + } catch(Throwable t) { + + if(conn != null) + conn.rollback(); + + throw new RuntimeException(t); + } finally { if (conn != null) @@ -369,7 +383,7 @@ /** * Helper class adds statements to the sail as they are visited by a parser. */ - private static class AddStatementHandler extends RDFHandlerBase { + static class AddStatementHandler extends RDFHandlerBase { private final BigdataSailConnection conn; private final AtomicLong nmodified; @@ -380,7 +394,8 @@ this.nmodified = nmodified; } - public void handleStatement(Statement stmt) throws RDFHandlerException { + public void handleStatement(final Statement stmt) + throws RDFHandlerException { try { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-06-24 17:43:27 UTC (rev 4794) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/QueryServlet.java 2011-06-24 19:21:49 UTC (rev 4795) @@ -130,7 +130,7 @@ * query. 
*/ queryTask = context.getQueryTask(namespace, timestamp, - queryStr, req, os); + queryStr, null/*acceptOverride*/, req, os); } catch (MalformedQueryException ex) { /* * Send back a BAD REQUEST (400) along with the text of the Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/UpdateServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/UpdateServlet.java 2011-06-24 17:43:27 UTC (rev 4794) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/UpdateServlet.java 2011-06-24 19:21:49 UTC (rev 4795) @@ -1,17 +1,30 @@ package com.bigdata.rdf.sail.webapp; +import java.io.IOException; +import java.io.InputStream; +import java.io.PipedOutputStream; +import java.util.concurrent.FutureTask; +import java.util.concurrent.atomic.AtomicLong; + import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; +import org.openrdf.rio.RDFFormat; +import org.openrdf.rio.RDFParser; +import org.openrdf.rio.RDFParserFactory; +import org.openrdf.rio.RDFParserRegistry; +import com.bigdata.journal.ITx; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import com.bigdata.rdf.sail.webapp.BigdataRDFContext.AbstractQueryTask; +import com.bigdata.rdf.sail.webapp.DeleteServlet.RemoveStatementHandler; +import com.bigdata.rdf.sail.webapp.InsertServlet.AddStatementHandler; + /** * Handler for UPDATE operations (PUT). * * @author martyncutcher */ public class UpdateServlet extends BigdataRDFServlet { @@ -28,8 +41,226 @@ } @Override - protected void doPut(HttpServletRequest req, HttpServletResponse resp) { - throw new UnsupportedOperationException(); + protected void doPut(HttpServletRequest req, HttpServletResponse resp) + throws IOException { + + final String queryStr = req.getParameter("query"); + + final String contentType = req.getContentType(); + + if(contentType == null) { + + resp.setStatus(HttpServletResponse.SC_BAD_REQUEST); + + return; + + } + + if(queryStr == null) { + + resp.setStatus(HttpServletResponse.SC_BAD_REQUEST); + + return; + + } + + doUpdateWithQuery(req, resp); + } + /** + * Delete all statements materialized by a DESCRIBE or CONSTRUCT query and + * then insert all statements in the request body. + * <p> + * Note: To avoid materializing the statements, this runs the query against + * the last commit time and uses a pipe to connect the query directly to the + * process deleting the statements. This is done while holding the + * unisolated connection, which prevents concurrent modifications. Therefore + * the entire DELETE + INSERT operation is ACID. 
+ */ + private void doUpdateWithQuery(final HttpServletRequest req, + final HttpServletResponse resp) throws IOException { + + final long begin = System.currentTimeMillis(); + + final String baseURI = req.getRequestURL().toString(); + + final String namespace = getNamespace(req); + + final String queryStr = req.getParameter("query"); + + if (queryStr == null) + throw new UnsupportedOperationException(); + + final String contentType = req.getContentType(); + + if (log.isInfoEnabled()) + log.info("Request body: " + contentType); + + final RDFFormat requestBodyFormat = RDFFormat.forMIMEType(contentType); + + if (requestBodyFormat == null) { + + buildResponse(resp, HTTP_BADREQUEST, MIME_TEXT_PLAIN, + "Content-Type not recognized as RDF: " + contentType); + + return; + + } + + final RDFParserFactory rdfParserFactory = RDFParserRegistry + .getInstance().get(requestBodyFormat); + + if (rdfParserFactory == null) { + + buildResponse(resp, HTTP_INTERNALERROR, MIME_TEXT_PLAIN, + "Parser factory not found: Content-Type=" + + contentType + ", format=" + requestBodyFormat); + + return; + + } + + if (log.isInfoEnabled()) + log.info("update with query: " + queryStr); + + try { + + /* + * Note: pipe is drained by this thread to consume the query + * results, which are the statements to be deleted. + */ + final PipedOutputStream os = new PipedOutputStream(); + final InputStream is = newPipedInputStream(os); + try { + + // Use this format for the query results. + final RDFFormat deleteQueryFormat = RDFFormat.NTRIPLES; + + final AbstractQueryTask queryTask = getBigdataRDFContext() + .getQueryTask(namespace, ITx.READ_COMMITTED, queryStr, + deleteQueryFormat.getDefaultMIMEType(), + req, os); + + switch (queryTask.queryType) { + case DESCRIBE: + case CONSTRUCT: + break; + default: + buildResponse(resp, HTTP_BADREQUEST, MIME_TEXT_PLAIN, + "Must be DESCRIBE or CONSTRUCT query."); + return; + } + + final AtomicLong nmodified = new AtomicLong(0L); + + BigdataSailRepositoryConnection conn = null; + try { + + conn = getBigdataRDFContext().getUnisolatedConnection( + namespace); + + // Run DELETE + { + + final RDFParserFactory factory = RDFParserRegistry + .getInstance().get(deleteQueryFormat); + + final RDFParser rdfParser = factory.getParser(); + + rdfParser.setValueFactory(conn.getTripleStore() + .getValueFactory()); + + rdfParser.setVerifyData(false); + + rdfParser.setStopAtFirstError(true); + + rdfParser + .setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); + + rdfParser.setRDFHandler(new RemoveStatementHandler(conn + .getSailConnection(), nmodified)); + + // Wrap as Future. + final FutureTask<Void> ft = new FutureTask<Void>( + queryTask); + + // Submit query for evaluation. + getBigdataRDFContext().queryService.execute(ft); + + // Run parser : visited statements will be deleted. + rdfParser.parse(is, baseURI); + + // Await the Future (of the Query) + ft.get(); + + } + + // Run INSERT + { + + /* + * There is a request body, so let's try and parse it. + */ + + final RDFParser rdfParser = rdfParserFactory + .getParser(); + + rdfParser.setValueFactory(conn.getTripleStore() + .getValueFactory()); + + rdfParser.setVerifyData(true); + + rdfParser.setStopAtFirstError(true); + + rdfParser + .setDatatypeHandling(RDFParser.DatatypeHandling.IGNORE); + + rdfParser.setRDFHandler(new AddStatementHandler(conn + .getSailConnection(), nmodified)); + + /* + * Run the parser, which will cause statements to be + * inserted. + */ + rdfParser.parse(req.getInputStream(), baseURI); + + } + + // Commit the mutation. 
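+ // (A single commit point makes the combined delete + insert atomic.)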
+ conn.commit(); + + final long elapsed = System.currentTimeMillis() - begin; + + reportModifiedCount(resp, nmodified.get(), elapsed); + + } catch(Throwable t) { + + if(conn != null) + conn.rollback(); + + throw new RuntimeException(t); + + } finally { + + if (conn != null) + conn.close(); + + } + + } catch (Throwable t) { + + throw BigdataRDFServlet.launderThrowable(t, resp, queryStr); + + } + + } catch (Exception ex) { + + // Will be rendered as an INTERNAL_ERROR. + throw new RuntimeException(ex); + + } + + } + } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-06-24 17:43:27 UTC (rev 4794) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-06-24 19:21:49 UTC (rev 4795) @@ -36,6 +36,7 @@ import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; import org.openrdf.query.BindingSet; +import org.openrdf.query.MalformedQueryException; import org.openrdf.query.TupleQueryResultHandlerBase; import org.openrdf.query.resultio.BooleanQueryResultFormat; import org.openrdf.query.resultio.BooleanQueryResultParser; @@ -45,6 +46,7 @@ import org.openrdf.query.resultio.TupleQueryResultParser; import org.openrdf.query.resultio.TupleQueryResultParserFactory; import org.openrdf.query.resultio.TupleQueryResultParserRegistry; +import org.openrdf.repository.RepositoryException; import org.openrdf.rio.RDFFormat; import org.openrdf.rio.RDFHandlerException; import org.openrdf.rio.RDFParser; @@ -54,6 +56,7 @@ import org.openrdf.rio.RDFWriterFactory; import org.openrdf.rio.RDFWriterRegistry; import org.openrdf.rio.helpers.StatementCollector; +import org.openrdf.sail.SailException; import org.xml.sax.Attributes; import org.xml.sax.ext.DefaultHandler2; @@ -65,6 +68,7 @@ import com.bigdata.rdf.sail.BigdataSail; import com.bigdata.rdf.sail.BigdataSailRepository; import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import com.bigdata.rdf.sail.sparql.BigdataSPARQLParser; import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.rdf.store.BD; import com.bigdata.rdf.store.LocalTripleStore; @@ -270,6 +274,16 @@ RDFFormat.RDFXML.getDefaultMIMEType() + ";q=1"// ; + /** + * The Content-Type (iff there will be a request body). + */ + public String contentType = null; + + /** + * The data to send as the request body (optional). + */ + public byte[] data = null; + /** The connection timeout (ms) -or- ZERO (0) for an infinite timeout. 
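+ * (java.net.URLConnection interprets a ZERO read timeout as infinite.)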
*/ public int timeout = 0; @@ -350,22 +364,45 @@ // opts.defaultGraphUri, "UTF-8"))); } + if (log.isDebugEnabled()) { + log.debug("*** Request ***"); + log.debug(opts.serviceURL); + log.debug(opts.queryStr); + } + HttpURLConnection conn = null; try { - conn = doConnect(urlString.toString(), opts.method); - - conn.setReadTimeout(opts.timeout); - +// conn = doConnect(urlString.toString(), opts.method); + final URL url = new URL(urlString.toString()); + conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod(opts.method); + conn.setDoOutput(true); + conn.setDoInput(true); + conn.setUseCaches(false); + conn.setReadTimeout(opts.timeout); conn.setRequestProperty("Accept", opts.acceptHeader); + + if (opts.contentType != null) { + + if (opts.data == null) + throw new AssertionError(); + + conn.setRequestProperty("Content-Type", opts.contentType); + + conn.setRequestProperty("Content-Length", Integer + .toString(opts.data.length)); - // write out the request headers - if (log.isDebugEnabled()) { - log.debug("*** Request ***"); - log.debug(opts.serviceURL); - log.debug(opts.queryStr); - } - + final OutputStream os = conn.getOutputStream(); + try { + os.write(opts.data); + os.flush(); + } finally { + os.close(); + } + + } + // connect. conn.connect(); @@ -1479,8 +1516,13 @@ doConstructTest("POST",RDFFormat.TRIX); } - private void doConstructTest(final String method, final RDFFormat format) - throws Exception { + /** + * Sets up a simple data set on the server. + * + * @throws SailException + * @throws RepositoryException + */ + private void setupDataOnServer() throws SailException, RepositoryException { final URI mike = new URIImpl(BD.NAMESPACE + "Mike"); final URI bryan = new URIImpl(BD.NAMESPACE + "Bryan"); @@ -1492,11 +1534,11 @@ final Literal label2 = new LiteralImpl("Bryan"); final BigdataSail sail = getSail(); - sail.initialize(); - final BigdataSailRepository repo = new BigdataSailRepository(sail); - try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + final BigdataSailRepositoryConnection cxn = (BigdataSailRepositoryConnection) repo .getConnection(); try { @@ -1523,6 +1565,53 @@ } finally { sail.shutDown(); } + } + + private void doConstructTest(final String method, final RDFFormat format) + throws Exception { + + setupDataOnServer(); + final URI mike = new URIImpl(BD.NAMESPACE + "Mike"); + final URI bryan = new URIImpl(BD.NAMESPACE + "Bryan"); + final URI person = new URIImpl(BD.NAMESPACE + "Person"); +// final URI likes = new URIImpl(BD.NAMESPACE + "likes"); +// final URI rdf = new URIImpl(BD.NAMESPACE + "RDF"); +// final URI rdfs = new URIImpl(BD.NAMESPACE + "RDFS"); +// final Literal label1 = new LiteralImpl("Mike"); +// final Literal label2 = new LiteralImpl("Bryan"); +// +// final BigdataSail sail = getSail(); +// sail.initialize(); +// final BigdataSailRepository repo = new BigdataSailRepository(sail); +// +// try { +// +// final BigdataSailRepositoryConnection cxn = (BigdataSailRepositoryConnection) repo +// .getConnection(); +// try { +// +// cxn.setAutoCommit(false); +// +// cxn.add(mike, RDF.TYPE, person); +// cxn.add(mike, likes, rdf); +// cxn.add(mike, RDFS.LABEL, label1); +// cxn.add(bryan, RDF.TYPE, person); +// cxn.add(bryan, likes, rdfs); +// cxn.add(bryan, RDFS.LABEL, label2); +// +// /* +// * Note: The either flush() or commit() is required to flush the +// * statement buffers to the database before executing any +// * operations that go around the sail. 
+// */
+// cxn.commit();
+// } finally {
+// cxn.close();
+// }
+//
+// } finally {
+// sail.shutDown();
+// }

 // The expected results.
 final Graph expected = new GraphImpl();

@@ -1583,4 +1672,367 @@

 }

+ /**
+ * Unit test for ACID UPDATE using PUT. This test is for the operation
+ * where a SPARQL query selects the data to be deleted and the request
+ * body contains the statements to be inserted.
+ */
+ public void test_PUT_UPDATE_WITH_QUERY() throws Exception {
+
+ setupDataOnServer();
+
+ final URI mike = new URIImpl(BD.NAMESPACE + "Mike");
+ final URI bryan = new URIImpl(BD.NAMESPACE + "Bryan");
+// final URI person = new URIImpl(BD.NAMESPACE + "Person");
+ final URI likes = new URIImpl(BD.NAMESPACE + "likes");
+ final URI rdf = new URIImpl(BD.NAMESPACE + "RDF");
+ final URI rdfs = new URIImpl(BD.NAMESPACE + "RDFS");
+
+ // The format used to PUT the data.
+ final RDFFormat format = RDFFormat.NTRIPLES;
+
+ /*
+ * This is the query that we will use to delete some triples from the
+ * database.
+ */
+ final String deleteQueryStr =//
+ "prefix bd: <"+BD.NAMESPACE+"> " +//
+ "prefix rdf: <"+RDF.NAMESPACE+"> " +//
+ "prefix rdfs: <"+RDFS.NAMESPACE+"> " +//
+ "CONSTRUCT { ?x bd:likes bd:RDFS }" +//
+ "WHERE { " +//
+// " ?x rdf:type bd:Person . " +//
+ " ?x bd:likes bd:RDFS " +//
+ "}";
+
+ /*
+ * First, run the query that we will use to delete the triples. This
+ * is a cross-check on the expected behavior of the query.
+ */
+ {
+
+ // The expected results.
+ final Graph expected = new GraphImpl();
+ {
+// expected.add(new StatementImpl(mike, RDF.TYPE, person));
+ expected.add(new StatementImpl(bryan, likes, rdfs));
+ }
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = deleteQueryStr;
+ opts.method = "GET";
+ opts.acceptHeader = TupleQueryResultFormat.SPARQL
+ .getDefaultMIMEType();
+
+ assertSameGraph(expected, buildGraph(doSparqlQuery(opts,
+ requestPath)));
+
+ }
+
+ /*
+ * Set up the document containing the statement to be inserted by the
+ * UPDATE operation.
+ */
+ final byte[] data;
+ {
+ final Graph g = new GraphImpl();
+
+ // The new data.
+ g.add(new StatementImpl(bryan, likes, rdf));
+
+ final RDFWriterFactory writerFactory = RDFWriterRegistry
+ .getInstance().get(format);
+ if (writerFactory == null)
+ fail("RDFWriterFactory not found: format=" + format);
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ final RDFWriter writer = writerFactory.getWriter(baos);
+ writer.startRDF();
+ for (Statement stmt : g) {
+ writer.handleStatement(stmt);
+ }
+ writer.endRDF();
+ data = baos.toByteArray();
+ }
+
+ /*
+ * Now, run the UPDATE operation.
+ */
+ {
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = deleteQueryStr;
+ opts.method = "PUT";
+ //opts.acceptHeader = ...;
+ opts.contentType = RDFFormat.NTRIPLES.getDefaultMIMEType();
+ opts.data = data;
+ final MutationResult ret = getMutationResult(doSparqlQuery(opts,
+ requestPath));
+ assertEquals(2, ret.mutationCount);// FIXME 1 removed, but also 1 added.
+
+ }
+
+ /*
+ * Now verify the post-condition state.
+ */
+ {
+
+ /*
+ * This query verifies that we removed the right triple (nobody is
+ * left who likes 'rdfs').
+ */
+ {
+
+ // The expected results.
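+ // (An empty graph: nothing may match the delete query once the update has run.)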
+ final Graph expected = new GraphImpl();
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = deleteQueryStr;
+ opts.method = "GET";
+ opts.acceptHeader = TupleQueryResultFormat.SPARQL
+ .getDefaultMIMEType();
+
+ assertSameGraph(expected, buildGraph(doSparqlQuery(opts,
+ requestPath)));
+ }
+
+ /* This query verifies that we added the right triple (two people
+ * now like 'rdf').
+ */
+ {
+
+ final String queryStr2 = //
+ "prefix bd: <" + BD.NAMESPACE + "> " + //
+ "prefix rdf: <" + RDF.NAMESPACE + "> " + //
+ "prefix rdfs: <" + RDFS.NAMESPACE + "> " + //
+ "CONSTRUCT { ?x bd:likes bd:RDF }" + //
+ "WHERE { " + //
+// " ?x rdf:type bd:Person . " + //
+ " ?x bd:likes bd:RDF " + //
+ "}";
+
+ // The expected results.
+ final Graph expected = new GraphImpl();
+
+ expected.add(new StatementImpl(mike, likes, rdf));
+ expected.add(new StatementImpl(bryan, likes, rdf));
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = queryStr2;
+ opts.method = "GET";
+ opts.acceptHeader = TupleQueryResultFormat.SPARQL
+ .getDefaultMIMEType();
+
+ assertSameGraph(expected, buildGraph(doSparqlQuery(opts,
+ requestPath)));
+
+ }
+
+ }
+
+ }
+
+ /**
+ * Unit test verifying that a SPARQL CONSTRUCT query may have an empty
+ * WHERE clause.
+ *
+ * @throws MalformedQueryException
+ */
+ public void test_CONSTRUCT_TEMPLATE_ONLY() throws MalformedQueryException {
+
+ final String deleteQueryStr =//
+ "prefix bd: <"+BD.NAMESPACE+"> " +//
+ "CONSTRUCT { bd:Bryan bd:likes bd:RDFS }" +//
+ "{}";
+
+ new BigdataSPARQLParser().parseQuery(deleteQueryStr,
+ "http://www.bigdata.com");
+
+ }
+
+ /**
+ * Unit test where the "query" used to delete triples from the database
+ * consists solely of a CONSTRUCT "template" without a WHERE clause (the
+ * WHERE clause is effectively optional since all of its elements are
+ * optional).
+ *
+ * @throws Exception
+ */
+ public void test_PUT_UPDATE_WITH_CONSTRUCT_TEMPLATE_ONLY() throws Exception {
+
+ setupDataOnServer();
+
+ final URI mike = new URIImpl(BD.NAMESPACE + "Mike");
+ final URI bryan = new URIImpl(BD.NAMESPACE + "Bryan");
+// final URI person = new URIImpl(BD.NAMESPACE + "Person");
+ final URI likes = new URIImpl(BD.NAMESPACE + "likes");
+ final URI rdf = new URIImpl(BD.NAMESPACE + "RDF");
+ final URI rdfs = new URIImpl(BD.NAMESPACE + "RDFS");
+
+ // The format used to PUT the data.
+ final RDFFormat format = RDFFormat.NTRIPLES;
+
+ /*
+ * This is the query that we will use to delete some triples from the
+ * database.
+ */
+ final String deleteQueryStr =//
+ "prefix bd: <"+BD.NAMESPACE+"> " +//
+ "CONSTRUCT { bd:Bryan bd:likes bd:RDFS }" +//
+ "{ }";
+
+ new BigdataSPARQLParser().parseQuery(deleteQueryStr,
+ "http://www.bigdata.com");
+
+ /*
+ * First, run the query that we will use to delete the triples. This
+ * is a cross-check on the expected behavior of the query.
+ */
+ {
+
+ // The expected results.
+ final Graph expected = new GraphImpl();
+ {
+// expected.add(new StatementImpl(mike, RDF.TYPE, person));
+ expected.add(new StatementImpl(bryan, likes, rdfs));
+ }
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = deleteQueryStr;
+ opts.method = "GET";
+ opts.acceptHeader = TupleQueryResultFormat.SPARQL
+ .getDefaultMIMEType();
+
+ assertSameGraph(expected, buildGraph(doSparqlQuery(opts,
+ requestPath)));
+
+ }
+
+ /*
+ * Set up the document containing the statement to be inserted by the
+ * UPDATE operation.
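+ * (The statement is serialized as NTriples to match the Content-Type
+ * of the PUT request.)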
+ */
+ final byte[] data;
+ {
+ final Graph g = new GraphImpl();
+
+ // The new data.
+ g.add(new StatementImpl(bryan, likes, rdf));
+
+ final RDFWriterFactory writerFactory = RDFWriterRegistry
+ .getInstance().get(format);
+ if (writerFactory == null)
+ fail("RDFWriterFactory not found: format=" + format);
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ final RDFWriter writer = writerFactory.getWriter(baos);
+ writer.startRDF();
+ for (Statement stmt : g) {
+ writer.handleStatement(stmt);
+ }
+ writer.endRDF();
+ data = baos.toByteArray();
+ }
+
+ /*
+ * Now, run the UPDATE operation.
+ */
+ {
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = deleteQueryStr;
+ opts.method = "PUT";
+ //opts.acceptHeader = ...;
+ opts.contentType = RDFFormat.NTRIPLES.getDefaultMIMEType();
+ opts.data = data;
+ final MutationResult ret = getMutationResult(doSparqlQuery(opts,
+ requestPath));
+ assertEquals(2, ret.mutationCount);// FIXME 1 removed, but also 1 added.
+
+ }
+
+ /*
+ * Now verify the post-condition state.
+ */
+ {
+
+ /*
+ * This query verifies that we removed the right triple (nobody is
+ * left who likes 'rdfs').
+ */
+ {
+
+ final String queryStr2 = //
+ "prefix bd: <" + BD.NAMESPACE + "> " + //
+ "prefix rdf: <" + RDF.NAMESPACE + "> " + //
+ "prefix rdfs: <" + RDFS.NAMESPACE + "> " + //
+ "CONSTRUCT { ?x bd:likes bd:RDFS }" + //
+ "WHERE { " + //
+// " ?x rdf:type bd:Person . " + //
+ " ?x bd:likes bd:RDFS " + // NB: Checks the kb!
+ "}";
+
+ // The expected results.
+ final Graph expected = new GraphImpl();
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = queryStr2;
+ opts.method = "GET";
+ opts.acceptHeader = TupleQueryResultFormat.SPARQL
+ .getDefaultMIMEType();
+
+ assertSameGraph(expected, buildGraph(doSparqlQuery(opts,
+ requestPath)));
+ }
+
+ /* This query verifies that we added the right triple (two people
+ * now like 'rdf').
+ */
+ {
+
+ final String queryStr2 = //
+ "prefix bd: <" + BD.NAMESPACE + "> " + //
+ "prefix rdf: <" + RDF.NAMESPACE + "> " + //
+ "prefix rdfs: <" + RDFS.NAMESPACE + "> " + //
+ "CONSTRUCT { ?x bd:likes bd:RDF }" + //
+ "WHERE { " + //
+// " ?x rdf:type bd:Person . " + //
+ " ?x bd:likes bd:RDF " + //
+ "}";
+
+ // The expected results.
+ final Graph expected = new GraphImpl();
+
+ expected.add(new StatementImpl(mike, likes, rdf));
+ expected.add(new StatementImpl(bryan, likes, rdf));
+
+ final QueryOptions opts = new QueryOptions();
+ opts.serviceURL = m_serviceURL;
+ opts.queryStr = queryStr2;
+ opts.method = "GET";
+ opts.acceptHeader = TupleQueryResultFormat.SPARQL
+ .getDefaultMIMEType();
+
+ assertSameGraph(expected, buildGraph(doSparqlQuery(opts,
+ requestPath)));
+
+ }
+
+ }
+
+ }
+
+// /**
+// * Unit test for ACID UPDATE using PUT. This test is for the operation where
+// * the request body is a multi-part MIME document conveying both the
+// * statements to be removed and the statement to be inserted.
+// */
+// public void test_PUT_UPDATE_WITH_MULTI_PART_MIME() {
+// fail("write test");
+// }
+
 }