From: <tho...@us...> - 2011-05-24 17:17:56
Revision: 4547 http://bigdata.svn.sourceforge.net/bigdata/?rev=4547&view=rev Author: thompsonbry Date: 2011-05-24 17:17:49 +0000 (Tue, 24 May 2011) Log Message: ----------- Resolution for [1]. Note: This change set changes the URL for the REST API from: http://hostname:port/ to http://hostname:port/sparql This commit also includes an updated version of autojar (v2.1) and an alternative way to slim down fastutils. [1] https://sourceforge.net/apps/trac/bigdata/ticket/303 (Default page for NanoSparqlServer) Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata/src/resources/logging/log4j.properties branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm/build.xml branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm3/build.xml branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/build.xml branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/src/resources/config/config.kb.sparql branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/WebAppUnassembled.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java branches/QUADS_QUERY_BRANCH/bigdata-war/src/resources/WEB-INF/web.xml branches/QUADS_QUERY_BRANCH/build.xml branches/QUADS_QUERY_BRANCH/src/build/autojar/autojar.jar Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-war/src/html/index.html Removed Paths: ------------- branches/QUADS_QUERY_BRANCH/bigdata-war/WEB-INF/web.xml Modified: branches/QUADS_QUERY_BRANCH/bigdata/src/resources/logging/log4j.properties =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/resources/logging/log4j.properties 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/resources/logging/log4j.properties 2011-05-24 17:17:49 UTC (rev 4547) @@ -198,7 +198,7 @@ log4j.logger.com.bigdata.util.httpd.NanoHTTPD=DEBUG log4j.logger.com.bigdata.util.httpd.AbstractHTTPD=DEBUG -log4j.logger.com.bigdata.rdf.sail.bench.NanoSparqlServer=ALL +log4j.logger.com.bigdata.rdf.sail.webapp.NanoSparqlServer=ALL # Lehigh benchmark integration log4j.logger.edu.lehigh.swat.bench.ubt.bigdata=INFO Modified: branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm/build.xml 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm/build.xml 2011-05-24 17:17:49 UTC (rev 4547) @@ -196,7 +196,7 @@ <arg value="${bsbm.resultsDir}/benchmark_result_pc${bsbm.pc}_runs${bsbm.runs}_mt${bsbm.mt}.xml" /> <!-- The SPARQL endpoint. 
--> - <arg value="http://localhost:${bsbm.nanoServerPort}/" /> + <arg value="http://localhost:${bsbm.nanoServerPort}/sparql/" /> <classpath> <path refid="runtime.classpath" /> Modified: branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm3/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm3/build.xml 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-perf/bsbm3/build.xml 2011-05-24 17:17:49 UTC (rev 4547) @@ -165,7 +165,7 @@ <target name="run-sparql-query" depends="prepare" description="Run a single query read from a file.."> <java classname="com.bigdata.rdf.sail.webapp.NanoSparqlClient" fork="true" failonerror="true"> - <arg line="-f query5-instance01-keyRangeVersion.sparql http://localhost:${bsbm.nanoServerPort}/" /> + <arg line="-f query5-instance01-keyRangeVersion.sparql http://localhost:${bsbm.nanoServerPort}/sparql/" /> <classpath> <path refid="runtime.classpath" /> </classpath> Modified: branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/build.xml 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/build.xml 2011-05-24 17:17:49 UTC (rev 4547) @@ -87,7 +87,7 @@ <target name="run-load" depends="compile"> <!-- delete file if it exists so we load into a new journal. --> <delete verbose="true" file="${lubm.journalFile}" /> - <java classname="com.bigdata.rdf.store.DataLoader" fork="true" failonerror="true" dir="${build.dir}/bin"> + <java classname="com.bigdata.rdf.store.DataLoader" fork="true" failonerror="true" ><!--dir="${build.dir}/bin"--> <arg line="-closure -namespace ${lubm.namespace} ${lubm.journalPropertyFile} ${lubm.ontologyFile} ${lubm.dataDir}" /> <!-- specify/override the journal file name. --> <jvmarg line="${queryJvmArgs} -Dcom.bigdata.journal.AbstractJournal.file=${lubm.journalFile} @@ -104,7 +104,7 @@ <echo message="journalFile=${lubm.journalFile}"/> <echo message="namespace=${lubm.namespace}"/> <echo message="port=${lubm.nanoServerPort}"/> - <java classname="com.bigdata.rdf.sail.webapp.NanoSparqlServer" fork="true" failonerror="true" dir="${build.dir}/bin"> + <java classname="com.bigdata.rdf.sail.webapp.NanoSparqlServer" fork="true" failonerror="true"><!-- dir="${build.dir}/bin"--> <arg line="${lubm.nanoServerPort} ${lubm.namespace} ${lubm.journalPropertyFile}" /> <!-- specify/override the journal file name. --> <jvmarg line="${queryJvmArgs} -Dcom.bigdata.journal.AbstractJournal.file=${lubm.journalFile}" /> @@ -115,7 +115,7 @@ </target> <target name="run-query" depends="compile" description="Runs the benchmark queries against the loaded data."> - <java classname="edu.lehigh.swat.bench.ubt.Test" fork="true" failonerror="true" dir="${build.dir}/bin"> + <java classname="edu.lehigh.swat.bench.ubt.Test" fork="true" failonerror="true" ><!--dir="${build.dir}/bin"--> <jvmarg value="-Dlubm.warmUp=false" /> <jvmarg value="-Dlubm.queryTime=10" /> <jvmarg value="-Dlubm.queryParallel=1" /> @@ -129,25 +129,4 @@ </java> </target> - <target name="set-properties" depends="compile" description="Set or change properties for a kb instance. The new values are read from stdin."> - <java classname="com.bigdata.rdf.sail.BigdataSailHelper" fork="true" failonerror="true"> - <!-- -Note: You can't change the writeRetentionQueue capacity here. 
It is propagated to the IndexMetadata records -for the BTree instances and needs to be updated in place for each index. - -inputstring="com.bigdata.btree.writeRetentionQueue.capacity=8000" - -inputstring="com.bigdata.rdf.sail.starJoins=false" - -inputstring="com.bigdata.rdf.sail.starJoins=true\ncom.bigdata.relation.rule.eval.ProgramTask.maxParallelSubqueries=10" ---> - <!-- dir="${build.dir}/bin" --> - <arg line="${lubm.journalFile} LTS kb" /> - <jvmarg value="-Dlog4j.configuration=log4j.properties" /> - <classpath> - <path refid="runtime.classpath" /> - </classpath> - </java> - </target> - </project> Modified: branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/src/resources/config/config.kb.sparql =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/src/resources/config/config.kb.sparql 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-perf/lubm/src/resources/config/config.kb.sparql 2011-05-24 17:17:49 UTC (rev 4547) @@ -9,6 +9,6 @@ ontology=ignored data=ignored # Use the default namespace specified to NanoSparqlServer -database=http://localhost:80/ +database=http://localhost:81/sparql/ # Use a specific namespace regardless of the default specified to NanoSparqlServer -#database=http://localhost:80/namespace/LUBM_U50/ +#database=http://localhost:80/sparql/namespace/LUBM_U50/ Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java 2011-05-24 17:17:49 UTC (rev 4547) @@ -23,11 +23,14 @@ */ package com.bigdata.rdf.sail.webapp; -import java.io.File; import java.util.LinkedHashMap; import java.util.Map; +import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.handler.DefaultHandler; +import org.eclipse.jetty.server.handler.HandlerList; +import org.eclipse.jetty.server.handler.ResourceHandler; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.openrdf.rio.RDFParser; @@ -212,31 +215,32 @@ */ namespace = args[i++]; - /* - * Property file. - */ + // Note: This is checked by the ServletContextListener. +// /* +// * Property file. +// */ final String propertyFile = args[i++]; - final File file = new File(propertyFile); - if (!file.exists()) { - throw new RuntimeException("Could not find file: " + file); - } - boolean isJini = false; - if (propertyFile.endsWith(".config")) { - // scale-out. - isJini = true; - } else if (propertyFile.endsWith(".properties")) { - // local journal. - isJini = false; - } else { - /* - * Note: This is a hack, but we are recognizing the jini - * configuration file with a .config extension and the journal - * properties file with a .properties extension. - */ - usage(1/* status */, - "File should have '.config' or '.properties' extension: " - + file); - } +// final File file = new File(propertyFile); +// if (!file.exists()) { +// throw new RuntimeException("Could not find file: " + file); +// } +// boolean isJini = false; +// if (propertyFile.endsWith(".config")) { +// // scale-out. +// isJini = true; +// } else if (propertyFile.endsWith(".properties")) { +// // local journal. 
+// isJini = false; +// } else { +// /* +// * Note: This is a hack, but we are recognizing the jini +// * configuration file with a .config extension and the journal +// * properties file with a .properties extension. +// */ +// usage(1/* status */, +// "File should have '.config' or '.properties' extension: " +// + file); +// } /* * Setup the ServletContext properties. @@ -290,14 +294,22 @@ static public Server newInstance(final int port, final IIndexManager indexManager, final Map<String, String> initParams) { - final ServletContextHandler context = getContext(initParams); + final ServletContextHandler context = getContextHandler(initParams); // Force the use of the caller's IIndexManager. context.setAttribute(IIndexManager.class.getName(), indexManager); + final HandlerList handlers = new HandlerList(); + + handlers.setHandlers(new Handler[] { + context,// + getResourceHandler(initParams),// +// new DefaultHandler()// + }); + final Server server = new Server(port); - server.setHandler(context); + server.setHandler(handlers); return server; @@ -322,11 +334,19 @@ static public Server newInstance(final int port, final String propertyFile, final Map<String, String> initParams) { - final ServletContextHandler context = getContext(initParams); + final ServletContextHandler context = getContextHandler(initParams); + final HandlerList handlers = new HandlerList(); + + handlers.setHandlers(new Handler[] { + context,// + getResourceHandler(initParams),// +// new DefaultHandler()// + }); + final Server server = new Server(port); - server.setHandler(context); + server.setHandler(handlers); return server; @@ -338,7 +358,7 @@ * @param initParams * The init parameters, per the web.xml definition. */ - static private ServletContextHandler getContext( + static private ServletContextHandler getContextHandler( final Map<String, String> initParams) { if (initParams == null) @@ -365,23 +385,6 @@ } - // final ResourceHandler resource_handler = new ResourceHandler(); - // - // resource_handler.setDirectoriesListed(false); // Nope! - // - // resource_handler.setWelcomeFiles(new String[] { "index.html" }); - // - // final HandlerList handlers = new HandlerList(); - // - // handlers.setHandlers(new Handler[] { resource_handler, new - // DefaultHandler() }); - // - // setHandler(handlers); - - // FIXME Set to locate the flot files as part of the CountersServlet - // setup. - // resource_handler.setResourceBase(config.resourceBase); - // Performance counters. context.addServlet(new ServletHolder(new CountersServlet()), "/counters"); @@ -390,12 +393,41 @@ context.addServlet(new ServletHolder(new StatusServlet()), "/status"); // Core RDF REST API, including SPARQL query and update. - context.addServlet(new ServletHolder(new RESTServlet()), "/"); + context.addServlet(new ServletHolder(new RESTServlet()), "/sparql"); +// context.setResourceBase("bigdata-war/src/html"); +// +// context.setWelcomeFiles(new String[]{"index.html"}); + return context; } + private static ResourceHandler getResourceHandler( + final Map<String, String> initParams) { + + if (initParams == null) + throw new IllegalArgumentException(); + + final ResourceHandler resourceHandler = new ResourceHandler(); + + resourceHandler.setDirectoriesListed(false); // Nope! + + // FIXME Set to locate the flot files as part of the CountersServlet + // setup. + // resource_handler.setResourceBase(config.resourceBase); + + // Note: FileResource or ResourceCollection. 
+// resourceHandler.setBaseResource(new FileResource(...)); + + resourceHandler.setResourceBase("bigdata-war/src/html"); + + resourceHandler.setWelcomeFiles(new String[]{"index.html"}); + + return resourceHandler; + + } + /** * Print the optional message on stderr, print the usage information on * stderr, and then force the program to exit with the given status code. Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2011-05-24 17:17:49 UTC (rev 4547) @@ -91,8 +91,9 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws IOException { + // Note: Will report BadRequest if "?query" not present. m_queryServlet.doGet(req, resp); - + } /** Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/WebAppUnassembled.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/WebAppUnassembled.java 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/WebAppUnassembled.java 2011-05-24 17:17:49 UTC (rev 4547) @@ -58,7 +58,7 @@ * * @throws Exception */ - public static void main(String[] args) throws Exception + public static void main(final String[] args) throws Exception { // default port Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServer.java 2011-05-24 17:17:49 UTC (rev 4547) @@ -114,7 +114,7 @@ /** * The request path for the REST API under test. */ - final private static String requestPath = "/"; + final private static String requestPath = "/sparql"; protected void setUp() throws Exception { @@ -1032,7 +1032,7 @@ assertEquals(23, countResults(doSparqlQuery(opts, requestPath))); - doDeleteWithBody("", 23, format); + doDeleteWithBody(requestPath, 23, format); // No solutions (assuming a told triple kb or quads kb w/o axioms). 
assertEquals(0, countResults(doSparqlQuery(opts, requestPath))); @@ -1077,9 +1077,9 @@ HttpURLConnection conn = null; try { - final URL url = new URL(m_serviceURL + "/" + servlet+"?delete"); - conn = (HttpURLConnection) url.openConnection(); - conn.setRequestMethod("POST"); + final URL url = new URL(m_serviceURL + servlet + "?delete"); + conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod("POST"); conn.setDoOutput(true); conn.setDoInput(true); conn.setUseCaches(false); Deleted: branches/QUADS_QUERY_BRANCH/bigdata-war/WEB-INF/web.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-war/WEB-INF/web.xml 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-war/WEB-INF/web.xml 2011-05-24 17:17:49 UTC (rev 4547) @@ -1,68 +0,0 @@ -<!DOCTYPE web-app PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN" "http://java.sun.com/dtd/web-app_2_3.dtd"> -<web-app> - <display-name>Bigdata</display-name> - <description>Bigdata</description> - <context-param> - <param-name>property-file</param-name> - <param-value>bigdata/RWStore.properties</param-value> - <description>The property file (for a standalone database instance) or the - jini configuration file (for a federation). The file MUST end with either - ".properties" or ".config". When deploying a web application, the bundled - property files are located in the root of the "bigdata" WAR and are located - as "bigdata/RWStore.properties", etc.</description> - </context-param> - <context-param> - <param-name>namespace</param-name> - <param-value>kb</param-value> - <description>The default bigdata namespace of for the triple or quad store - instance to be exposed.</description> - </context-param> - <context-param> - <param-name>create</param-name> - <param-value>true</param-value> - <description>When true a new triple or quads store instance will be created - if none is found at that namespace.</description> - </context-param> - <context-param> - <param-name>query-thread-pool-size</param-name> - <param-value>16</param-value> - <description>The size of the thread pool used to service SPARQL queries -OR- - ZERO (0) for an unbounded thread pool.</description> - </context-param> - <listener> - <listener-class>com.bigdata.rdf.sail.webapp.BigdataRDFServletContextListener</listener-class> - </listener> - <servlet> - <servlet-name>REST API</servlet-name> - <display-name>REST API</display-name> - <description>The REST API, including a SPARQL end point, as described at - https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer - </description> - <servlet-class>com.bigdata.rdf.sail.webapp.RESTServlet</servlet-class> - <load-on-startup>0</load-on-startup> - </servlet> - <servlet> - <servlet-name>Status</servlet-name> - <display-name>Status</display-name> - <description>A status page.</description> - <servlet-class>com.bigdata.rdf.sail.webapp.StatusServlet</servlet-class> - </servlet> - <servlet> - <servlet-name>Counters</servlet-name> - <display-name>Performance counters</display-name> - <description>Performance counters.</description> - <servlet-class>com.bigdata.rdf.sail.webapp.CountersServlet</servlet-class> - </servlet> - <servlet-mapping> - <servlet-name>REST API</servlet-name> - <url-pattern>/bigdata</url-pattern> - </servlet-mapping> - <servlet-mapping> - <servlet-name>Status</servlet-name> - <url-pattern>/bigdata/status</url-pattern> - </servlet-mapping> - <servlet-mapping> - <servlet-name>Counters</servlet-name> - 
<url-pattern>/bigdata/counters</url-pattern> - </servlet-mapping> -</web-app> Added: branches/QUADS_QUERY_BRANCH/bigdata-war/src/html/index.html =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-war/src/html/index.html (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-war/src/html/index.html 2011-05-24 17:17:49 UTC (rev 4547) @@ -0,0 +1,39 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" +"http://www.w3.org/TR/html4/loose.dtd"> +<html> +<head> +<meta http-equiv="Content-Type" content="text/html;charset=utf-8" > +<title>bigdata® NanoSpaqrlServer</title> +<!-- $Id$ --> +</head> +<body> + +<p> + +Welcome to bigdata®. Please consult the +<a href="https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer"> +documentation </a> for information on using the NanoSparqlServer's REST Api. + +</p> + +<p> +The following URLs should be active when deployed in the default configuration: +</p> +<dl> +<dt>http://hostname:port/bigdata</dt> +<dd>This page.</dd> +<dt>http://hostname:port/bigdata/sparql/</dt> +<dd>The SPARQL REST API.</dd> +<dt>http://hostname:port/bigdata/status</dt> +<dd>A status page</dd> +<dt>http://hostname:port/bigdata/counters</dt> +<dd>A performance counters page</dd> +</dl> + +<p> +Where <i>hostname</i> is the name of this host and <i>port</i> is the port at +which this page was accessed. +</p> + +</body> +</html> \ No newline at end of file Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-war/src/html/index.html ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Modified: branches/QUADS_QUERY_BRANCH/bigdata-war/src/resources/WEB-INF/web.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-war/src/resources/WEB-INF/web.xml 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/bigdata-war/src/resources/WEB-INF/web.xml 2011-05-24 17:17:49 UTC (rev 4547) @@ -62,7 +62,7 @@ </servlet> <servlet-mapping> <servlet-name>REST API</servlet-name> - <url-pattern>/</url-pattern> + <url-pattern>/sparql</url-pattern> </servlet-mapping> <servlet-mapping> <servlet-name>Status</servlet-name> Modified: branches/QUADS_QUERY_BRANCH/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/build.xml 2011-05-24 14:40:20 UTC (rev 4546) +++ branches/QUADS_QUERY_BRANCH/build.xml 2011-05-24 17:17:49 UTC (rev 4547) @@ -229,7 +229,7 @@ from colt and fastutil which are required by the proceeding jars. The main advantage of the resulting jar is that the vast majority of fastutil is not necessary, and it is a 13M jar. 
- --> + <target name="autojar" description="Produce an expanded version of the bigdata jar which includes the data from the dsi-util and lgpl-utils jars and only @@ -244,14 +244,23 @@ ${bigdata.dir}/bigdata/lib/lgpl-utils*.jar " /> </java> + </target> --> + <!-- java autojar.jar -vo fastutil-stripped.jar -c fastutil.jar -Y bigdata.jar --> + <target name="autojar-strip-fastutil" depends="prepare" + description="Strip unused classes from fastutil."> + <java jar="src/build/autojar/autojar.jar" fork="true" failonerror="true"> + <arg line="-o ${build.dir}/fastutil-stripped.jar + -c ${bigdata.dir}/bigdata/lib/unimi/fastutil*.jar + -- + -Y ${build.dir}/lib/${version}.jar + -Y ${bigdata.dir}/bigdata/lib/dsi-util*.jar + -Y ${bigdata.dir}/bigdata/lib/lgpl-utils*.jar + " /> + </java> </target> <!--depends="bundleJar"--> - <!-- - TODO 13M of the resulting WAR is fastutils. We need to slim down that JAR to - just the classes that we actually use before deploying it. - --> - <target name="war" depends="autojar" + <target name="war" depends="bundleJar, autojar-strip-fastutil" description="Generates a WAR artifact."> <delete file="${build.dir}/bigdata.war"/> <echo message="Building WAR"/> @@ -262,15 +271,17 @@ <fileset dir="bigdata-war/src/jsp"/> <fileset dir="bigdata-war/src/images"/> <file file="bigdata-war/src/resources/RWStore.properties"/> - <!-- bigdata jar plus some dependencies as filtered by autojar. --> - <lib file="${build.dir}/bigdataPlus.jar"/> + <!-- bigdata jar plus some dependencies as filtered by autojar. + <lib file="${build.dir}/bigdataPlus.jar"/> --> + <!-- The stripped version of fasutil. --> + <lib file="${build.dir}/fastutil-stripped.jar"/> <lib dir="${build.dir}/lib"> - <!-- jars bundled into "bigdata-plus" by autojar. --> <exclude name="fastutil*.jar"/> + <!-- jars bundled into "bigdata-plus" by autojar. <exclude name="colt*.jar"/> <exclude name="dsi-util*.jar"/> <exclude name="lgpl-utils*.jar"/> - <exclude name="bigdata*.jar"/> + <exclude name="bigdata*.jar"/>--> <!-- jars which are not currently used. --> <exclude name="iris*.jar"/> <exclude name="jgrapht*.jar"/> Modified: branches/QUADS_QUERY_BRANCH/src/build/autojar/autojar.jar =================================================================== (Binary files differ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
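In practice, the visible effect of r4547 is that the SPARQL REST API now answers at the /sparql path, while a GET against the context root returns the new static index.html welcome page served by the ResourceHandler. A minimal client sketch against the relocated endpoint is shown below; the host, port, and the choice of the XML results format are assumptions for a default standalone deployment, not values fixed by this commit.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class SparqlEndpointSmokeTest {

    public static void main(final String[] args) throws Exception {

        // The REST API is now rooted at /sparql rather than at "/".
        // localhost:8080 is illustrative; use the port passed to NanoSparqlServer.
        final String endpoint = "http://localhost:8080/sparql";

        final String query = "SELECT * WHERE { ?s ?p ?o } LIMIT 10";

        // The RESTServlet's doGet() requires the ?query parameter and reports
        // a bad request without it (see the change to RESTServlet above).
        final URL url = new URL(endpoint + "?query="
                + URLEncoder.encode(query, "UTF-8"));

        final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Accept", "application/sparql-results+xml");

        final BufferedReader reader = new BufferedReader(new InputStreamReader(
                conn.getInputStream(), "UTF-8"));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            reader.close();
            conn.disconnect();
        }
    }
}

The same relocation is why the benchmark build files and config.kb.sparql in this change set now point their endpoint URLs at .../sparql rather than at the bare host:port.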
From: <mrp...@us...> - 2011-05-25 20:41:53
Revision: 4551 http://bigdata.svn.sourceforge.net/bigdata/?rev=4551&view=rev Author: mrpersonick Date: 2011-05-25 20:41:47 +0000 (Wed, 25 May 2011) Log Message: ----------- fixed a performance regression by optimizing a CompareBOp into a SameTermBOp when one of the operands is a Constant<URI> Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SameTermBOp.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SameTermBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SameTermBOp.java 2011-05-25 20:25:52 UTC (rev 4550) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SameTermBOp.java 2011-05-25 20:41:47 UTC (rev 4551) @@ -26,11 +26,16 @@ import java.util.Map; +import org.openrdf.query.algebra.Compare.CompareOp; + import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.NV; +import com.bigdata.bop.PipelineOp; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.constraints.CompareBOp.Annotations; /** * Compare two terms for exact equality. @@ -42,13 +47,30 @@ */ private static final long serialVersionUID = 1L; + public interface Annotations extends PipelineOp.Annotations { + + /** + * The compare operator, which is a {@link CompareOp} enum value. + * Must be either EQ or NE. + */ + String OP = (CompareBOp.class.getName() + ".op").intern(); + + } + public SameTermBOp(final IValueExpression<? extends IV> left, final IValueExpression<? extends IV> right) { - this(new BOp[] { left, right }, null); + this(left, right, CompareOp.EQ); } + public SameTermBOp(final IValueExpression<? extends IV> left, + final IValueExpression<? extends IV> right, final CompareOp op) { + + this(new BOp[] { left, right }, NV.asMap(new NV(Annotations.OP, op))); + + } + /** * Required shallow copy constructor. */ @@ -59,6 +81,11 @@ if (args.length != 2 || args[0] == null || args[1] == null) throw new IllegalArgumentException(); + final CompareOp op = (CompareOp) getRequiredProperty(Annotations.OP); + + if (!(op == CompareOp.EQ || op == CompareOp.NE)) + throw new IllegalArgumentException(); + } /** @@ -77,7 +104,12 @@ if (left == null || right == null) throw new SparqlTypeErrorException(); - return left.equals(right); + final CompareOp op = (CompareOp) getRequiredProperty(Annotations.OP); + + switch(op) { + case NE: return !left.equals(right); + default: return left.equals(right); + } } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-05-25 20:25:52 UTC (rev 4550) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-05-25 20:41:47 UTC (rev 4551) @@ -1932,11 +1932,35 @@ } private IValueExpression<? extends IV> toVE(final Compare compare) { - final IValueExpression<? extends IV> iv1 = + final IValueExpression<? extends IV> left = toVE(compare.getLeftArg()); - final IValueExpression<? 
extends IV> iv2 = + final IValueExpression<? extends IV> right = toVE(compare.getRightArg()); - return new CompareBOp(iv1, iv2, compare.getOperator()); + + /* + * If the term is a Constant<URI> and the op is EQ or NE then we can + * do a sameTerm optimization. + */ + final CompareOp op = compare.getOperator(); + if (op == CompareOp.EQ || op == CompareOp.NE) { + + if (left instanceof Constant) { + final IV iv = ((Constant<? extends IV>) left).get(); + if (iv.isURI()) { + return new SameTermBOp(left, right, op); + } + } + + if (right instanceof Constant) { + final IV iv = ((Constant<? extends IV>) right).get(); + if (iv.isURI()) { + return new SameTermBOp(left, right, op); + } + } + + } + + return new CompareBOp(left, right, compare.getOperator()); } private IValueExpression<? extends IV> toVE(final Bound bound) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
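The rewrite in toVE(Compare) is sound because of how SPARQL defines the "=" and "!=" operators: when at least one argument is an IRI, RDFterm-equal cannot raise a type error and simply tests whether the two arguments are the same RDF term, so FILTER(?s = <some-uri>) and FILTER(sameTerm(?s, <some-uri>)) are equivalent, with "!=" as the negation. SameTermBOp only compares the two IVs for identity (see its get() logic above), avoiding the more general machinery behind CompareBOp. The sketch below restates the guard added in this commit outside of the Sesame AST traversal; it uses only classes touched by this change set and is meant as an illustration rather than a drop-in replacement.

import org.openrdf.query.algebra.Compare.CompareOp;

import com.bigdata.bop.Constant;
import com.bigdata.bop.IValueExpression;
import com.bigdata.rdf.internal.IV;
import com.bigdata.rdf.internal.constraints.CompareBOp;
import com.bigdata.rdf.internal.constraints.SameTermBOp;

public class SameTermRewriteSketch {

    /**
     * Prefer SameTermBOp over CompareBOp when the operator is EQ or NE and
     * at least one operand is a constant URI.
     */
    static IValueExpression<? extends IV> rewrite(
            final IValueExpression<? extends IV> left,
            final IValueExpression<? extends IV> right,
            final CompareOp op) {

        if (op == CompareOp.EQ || op == CompareOp.NE) {

            if (left instanceof Constant
                    && ((Constant<? extends IV>) left).get().isURI()) {
                return new SameTermBOp(left, right, op);
            }

            if (right instanceof Constant
                    && ((Constant<? extends IV>) right).get().isURI()) {
                return new SameTermBOp(left, right, op);
            }

        }

        // Fall back to the general-purpose comparison operator.
        return new CompareBOp(left, right, op);

    }

}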
From: <tho...@us...> - 2011-06-02 16:59:40
Revision: 4606 http://bigdata.svn.sourceforge.net/bigdata/?rev=4606&view=rev Author: thompsonbry Date: 2011-06-02 16:59:34 +0000 (Thu, 02 Jun 2011) Log Message: ----------- This turned out to be a relatively simple issue. The ant 'bundle' task now flattens the directory structure of the jars. This was causing a problem for the cluster deployment since the jini jars were divided into three directories (jini/lib, jini/lib-dl, and jini-lib-ext). The solution was simply to divide the "bundle" task into two "copy" operations. One flattens most of the jars, but does not handle the jars which are specific to the cluster deployment. The other copies the jars for the cluster deployment, maintaining their directory hierarchy. This issue only effected the 'ant-install-artifact' task and service startup when using "bigdata 'start'". It would not have effected the alternative install procedure which is based on the REL.<version>.tgz artifact since that does not rely on the 'bundle' target. I also modified bigdataStandalone.config to not override the class path for zookeeper. Doing so is only asking for problems since then you have to maintain the right version information for the zookeeper dependencies in the config file as well. See https://sourceforge.net/apps/trac/bigdata/ticket/314 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/build.xml branches/QUADS_QUERY_BRANCH/src/resources/config/bigdataStandalone.config Modified: branches/QUADS_QUERY_BRANCH/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/build.xml 2011-06-02 14:19:18 UTC (rev 4605) +++ branches/QUADS_QUERY_BRANCH/build.xml 2011-06-02 16:59:34 UTC (rev 4606) @@ -199,7 +199,7 @@ </target> <target name="bundle" description="Bundles all dependencies for easier deployments and releases (does not bundle the bigdata jar)."> -<copy toDir="${build.dir}/lib" flatten="true"> + <copy toDir="${build.dir}/lib" flatten="true"> <fileset dir="${bigdata.dir}/bigdata/lib"> <include name="**/*.jar" /> <include name="**/*.so" /> @@ -211,16 +211,20 @@ <!-- The BytesUtil JNI class is not recommended at this time (no performance gain). --> <exclude name="**/*BytesUtil*" /> </fileset> - <fileset dir="${bigdata.dir}/bigdata-jini/lib"> - <include name="**/*.jar" /> - </fileset> <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> <include name="**/*.jar" /> </fileset> <fileset dir="${bigdata.dir}/bigdata-sails/lib"> <include name="**/*.jar" /> </fileset> -</copy> + </copy> + <!-- Do NOT flatten the jini jars. We need the to preserve the --> + <!-- lib, lib-dl, and lib-ext distinctions. --> + <copy toDir="${build.dir}/lib" flatten="false"> + <fileset dir="${bigdata.dir}/bigdata-jini/lib"> + <include name="**/*.jar" /> + </fileset> + </copy> </target> <!-- @@ -1026,6 +1030,8 @@ <target name="ant-install-prepare" depends="jar, bundle" description="Stage all files (src, lib, config, etc.) needed for ant based install."> + + <!-- stage source files. --> <copy toDir="${build.dir}/bigdata/src"> <fileset dir="${bigdata.dir}/bigdata/src" /> </copy> @@ -1045,11 +1051,11 @@ <fileset dir="${bigdata.dir}/lgpl-utils/src" /> </copy> + <!-- stage library dependencies. 
--> <mkdir dir="${build.dir}/bigdata/lib" /> <copy toDir="${build.dir}/bigdata/lib"> <fileset dir="${bigdata.dir}/bigdata/lib" /> </copy> - <mkdir dir="${build.dir}/bigdata-jini/lib" /> <copy toDir="${build.dir}/bigdata-jini/lib"> <fileset dir="${bigdata.dir}/bigdata-jini/lib" /> @@ -1058,7 +1064,6 @@ <copy toDir="${build.dir}/bigdata-rdf/lib"> <fileset dir="${bigdata.dir}/bigdata-rdf/lib" /> </copy> - <mkdir dir="${build.dir}/bigdata-sails/lib" /> <copy toDir="${build.dir}/bigdata-sails/lib"> <fileset dir="${bigdata.dir}/bigdata-sails/lib" /> Modified: branches/QUADS_QUERY_BRANCH/src/resources/config/bigdataStandalone.config =================================================================== --- branches/QUADS_QUERY_BRANCH/src/resources/config/bigdataStandalone.config 2011-06-02 14:19:18 UTC (rev 4605) +++ branches/QUADS_QUERY_BRANCH/src/resources/config/bigdataStandalone.config 2011-06-02 16:59:34 UTC (rev 4606) @@ -499,11 +499,11 @@ // + ",3="+bigdata.zoo3+":2888:3888" ; - // This is all you need to run zookeeper. - classpath = new String[] { - "@LIB_DIR@/apache/zookeeper-3.2.1.jar", - "@LIB_DIR@/apache/log4j-1.2.15.jar" - }; + // This is all you need to run zookeeper, but then you have to keep the version numbers up to date. +// classpath = new String[] { +// "@LIB_DIR@/apache/zookeeper-3.2.1.jar", +// "@LIB_DIR@/apache/log4j-1.2.15.jar" +// }; /* Optional command line arguments for the JVM used to execute * zookeeper. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2011-06-03 13:19:34
Revision: 4616 http://bigdata.svn.sourceforge.net/bigdata/?rev=4616&view=rev Author: thompsonbry Date: 2011-06-03 13:19:28 +0000 (Fri, 03 Jun 2011) Log Message: ----------- Back ported logic to enable CI without services. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/build.properties branches/QUADS_QUERY_BRANCH/build.xml Modified: branches/QUADS_QUERY_BRANCH/build.properties =================================================================== --- branches/QUADS_QUERY_BRANCH/build.properties 2011-06-03 13:16:31 UTC (rev 4615) +++ branches/QUADS_QUERY_BRANCH/build.properties 2011-06-03 13:19:28 UTC (rev 4616) @@ -28,6 +28,13 @@ #javac.source=1.6 javac.encoding=Cp1252 +# Set to false to NOT start services (zookeeper, lookup server, class server, etc). +# When false, tests which depend on those services will not run. (This can also be +# set by CI if you leave if undefined here.) For example: +# export skipTestServices=true +# ant -DskipTestServices=${skipTestServices} ... +#skipTestServices=false + ## # Properties for creating a release. ## @@ -380,5 +387,6 @@ # CI properties. These must agree with the actual installation directory and zoo.cfg # file for the zookeeper instance used to run CI. test.zookeeper.installDir=/Users/bryan/zookeeper-3.2.1 +#test.zookeeper.installDir=/usr/java/zookeeper-3.2.1 test.zookeeper.tickTime=2000 test.zookeeper.clientPort=2081 Modified: branches/QUADS_QUERY_BRANCH/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/build.xml 2011-06-03 13:16:31 UTC (rev 4615) +++ branches/QUADS_QUERY_BRANCH/build.xml 2011-06-03 13:19:28 UTC (rev 4616) @@ -1637,26 +1637,45 @@ <!-- Issue stop on zookeeper first since zkServer leaves a pid file --> <!-- around if the JVM is killed. That pid file needs to be cleaned --> <!-- up before we can start a new instance. --> - <antcall target="stopZookeeper" /> + <!-- + <antcall target="stopZookeeper"/> <antcall target="stopLookup" /> <antcall target="stopHttpd" /> - - <antcall target="startZookeeper" /> - <antcall target="startHttpd" /> - <antcall target="startLookup" /> - + + <antcall target="startZookeeper"/> + <antcall target="startHttpd"/> + <antcall target="startLookup"/> + --> <!-- Run the tests --> + <antcall target="stopTestServices"/> + <antcall target="startTestServices"/> <antcall target="run-junit" /> - + <antcall target="stopTestServices"/> + <!-- <antcall target="stopLookup" /> <antcall target="stopHttpd" /> <antcall target="stopZookeeper" /> + --> <!-- This message is noticed by the hudson build and is used to trigger after various after actions. --> <echo>JUNIT RUN COMPLETE</echo> </target> + <target name="stopTestServices" unless="${skipTestServices}"> + <echo message="Stopping test services."/> + <antcall target="stopZookeeper"/> + <antcall target="stopLookup" /> + <antcall target="stopHttpd" /> + </target> + + <target name="startTestServices" unless="${skipTestServices}"> + <echo message="Starting test services."/> + <antcall target="startZookeeper"/> + <antcall target="startHttpd"/> + <antcall target="startLookup"/> + </target> + <target name="startHttpd"> <echo>java -jar ${dist.lib}/classserver.jar -verbose -stoppable -port ${test.codebase.port} -dir ${test.codebase.dir} </echo> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2011-06-06 14:11:30
Revision: 4630 http://bigdata.svn.sourceforge.net/bigdata/?rev=4630&view=rev Author: mrpersonick Date: 2011-06-06 14:11:22 +0000 (Mon, 06 Jun 2011) Log Message: ----------- lex joins and sesame 1.0 operators Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/IVUtility.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyOrder.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/CacheValueFilter.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/IVUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/IVUtility.java 2011-06-06 12:36:01 UTC (rev 4629) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/IVUtility.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -215,7 +215,7 @@ // fixed length numerics if (dte1.isFloatingPointNumeric() || dte2.isFloatingPointNumeric()) { // non-BigDecimal floating points - if (dte1 == DTE.XSDFloat && dte2 == DTE.XSDFloat) + if (dte1 == DTE.XSDFloat || dte2 == DTE.XSDFloat) return numericalMath(num1.floatValue(), num2.floatValue(), op); else return numericalMath(num1.doubleValue(), num2.doubleValue(), op); Added: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -0,0 +1,185 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.rdf.internal.constraints; + +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.log4j.Logger; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; +import com.bigdata.bop.NV; +import com.bigdata.rdf.error.SparqlTypeErrorException; +import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.internal.TermId; +import com.bigdata.rdf.internal.VTE; +import com.bigdata.rdf.internal.XSD; +import com.bigdata.rdf.model.BigdataLiteral; +import com.bigdata.rdf.model.BigdataURI; +import com.bigdata.rdf.model.BigdataValue; +import com.bigdata.rdf.model.BigdataValueFactory; +import com.bigdata.rdf.model.BigdataValueFactoryImpl; + +/** + * Return the datatype of the literal argument. + */ +public class DatatypeBOp extends IVValueExpression<IV> + implements INeedsMaterialization { + + /** + * + */ + private static final long serialVersionUID = 7391999162162545704L; + + private static final transient Logger log = Logger.getLogger(DatatypeBOp.class); + + + public interface Annotations extends BOp.Annotations { + + String NAMESPACE = (DatatypeBOp.class.getName() + ".namespace").intern(); + + } + + public DatatypeBOp(final IValueExpression<? extends IV> x, final String lex) { + + this(new BOp[] { x }, + NV.asMap(new NV(Annotations.NAMESPACE, lex))); + + } + + /** + * Required shallow copy constructor. + */ + public DatatypeBOp(final BOp[] args, final Map<String, Object> anns) { + + super(args, anns); + + if (args.length != 1 || args[0] == null) + throw new IllegalArgumentException(); + + if (getProperty(Annotations.NAMESPACE) == null) + throw new IllegalArgumentException(); + + } + + /** + * Required deep copy constructor. 
+ */ + public DatatypeBOp(final DatatypeBOp op) { + super(op); + } + + public IV get(final IBindingSet bs) { + + final IV iv = get(0).get(bs); + + if (log.isDebugEnabled()) { + log.debug(iv); + } + + // not yet bound + if (iv == null) + throw new SparqlTypeErrorException(); + + final BigdataValue val = iv.getValue(); + + if (val == null) + throw new NotMaterializedException(); + + if (val instanceof BigdataLiteral) { + + final BigdataLiteral literal = (BigdataLiteral) val; + + final BigdataURI datatype; + + if (literal.getDatatype() != null) { + + // literal with datatype + datatype = literal.getDatatype(); + + } else if (literal.getLanguage() == null) { + + // simple literal + final String namespace = (String) + getRequiredProperty(Annotations.NAMESPACE); + + final BigdataValueFactory vf = + BigdataValueFactoryImpl.getInstance(namespace); + + datatype = vf.asValue(XSD.STRING); + + } else { + + throw new SparqlTypeErrorException(); + + } + + IV datatypeIV = datatype.getIV(); + + if (datatypeIV == null) { + + datatypeIV = new TermId(VTE.valueOf(val), TermId.NULL); + datatype.setIV(datatypeIV); + + } + + // cache the value on the IV + datatypeIV.setValue(datatype); + + return datatypeIV; + + } + + throw new SparqlTypeErrorException(); + + } + + private volatile transient Set<IVariable<IV>> terms; + + public Set<IVariable<IV>> getTermsToMaterialize() { + + if (terms == null) { + + terms = new LinkedHashSet<IVariable<IV>>(); + + for (BOp bop : args()) { + + if (bop instanceof IVariable) + terms.add((IVariable<IV>) bop); + + } + + } + + return terms; + + } + +} Added: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -0,0 +1,195 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.rdf.internal.constraints; + +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.apache.log4j.Logger; +import org.openrdf.model.Value; +import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException; +import org.openrdf.query.algebra.evaluation.function.Function; +import org.openrdf.query.algebra.evaluation.function.FunctionRegistry; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; +import com.bigdata.bop.NV; +import com.bigdata.rdf.error.SparqlTypeErrorException; +import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.internal.TermId; +import com.bigdata.rdf.internal.VTE; +import com.bigdata.rdf.model.BigdataValue; +import com.bigdata.rdf.model.BigdataValueFactory; +import com.bigdata.rdf.model.BigdataValueFactoryImpl; + +/** + * Call one of the Sesame casting functions. + */ +public class FuncBOp extends IVValueExpression<IV> + implements INeedsMaterialization { + + /** + * + */ + private static final long serialVersionUID = 2587499644967260639L; + + private static final transient Logger log = Logger.getLogger(FuncBOp.class); + + + public interface Annotations extends BOp.Annotations { + + String NAMESPACE = (FuncBOp.class.getName() + ".namespace").intern(); + + String FUNCTION = (FuncBOp.class.getName() + ".function").intern(); + + } + + public FuncBOp(final IValueExpression<? extends IV>[] args, + final String func, final String lex) { + + this(args, NV.asMap( + new NV(Annotations.NAMESPACE, lex), + new NV(Annotations.FUNCTION, func))); + + } + + /** + * Required shallow copy constructor. + */ + public FuncBOp(final BOp[] args, final Map<String, Object> anns) { + + super(args, anns); + + if (getProperty(Annotations.NAMESPACE) == null) + throw new IllegalArgumentException(); + + if (getProperty(Annotations.FUNCTION) == null) + throw new IllegalArgumentException(); + + } + + /** + * Required deep copy constructor. 
+ */ + public FuncBOp(final FuncBOp op) { + super(op); + } + + public IV get(final IBindingSet bs) { + + final List<BOp> args = args(); + + final Value[] vals = new Value[args.size()]; + + for (int i = 0; i < vals.length; i++) { + + final IV iv = get(i).get(bs); + + if (log.isDebugEnabled()) { + log.debug(iv); + } + + // not yet bound + if (iv == null) + throw new SparqlTypeErrorException(); + + final BigdataValue val = iv.getValue(); + + if (val == null) + throw new NotMaterializedException(); + + vals[i] = val; + + } + + final String funcName = + (String) getRequiredProperty(Annotations.FUNCTION); + + final Function func = FunctionRegistry.getInstance().get(funcName); + + if (func == null) { + throw new RuntimeException("Unknown function '" + funcName + "'"); + } + + final String namespace = (String) + getRequiredProperty(Annotations.NAMESPACE); + + final BigdataValueFactory vf = + BigdataValueFactoryImpl.getInstance(namespace); + + try { + + final BigdataValue val = (BigdataValue) func.evaluate(vf, vals); + + IV iv = val.getIV(); + + if (iv == null) { + + iv = new TermId(VTE.valueOf(val), TermId.NULL); + + val.setIV(iv); + + } + + // cache the value on the IV + iv.setValue(val); + + return iv; + + } catch (ValueExprEvaluationException ex) { + + throw new SparqlTypeErrorException(); + + } + + } + + private volatile transient Set<IVariable<IV>> terms; + + public Set<IVariable<IV>> getTermsToMaterialize() { + + if (terms == null) { + + terms = new LinkedHashSet<IVariable<IV>>(); + + for (BOp bop : args()) { + + if (bop instanceof IVariable) + terms.add((IVariable<IV>) bop); + + } + + } + + return terms; + + } + +} Added: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -0,0 +1,174 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.rdf.internal.constraints; + +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.log4j.Logger; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; +import com.bigdata.bop.NV; +import com.bigdata.rdf.error.SparqlTypeErrorException; +import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.internal.TermId; +import com.bigdata.rdf.internal.VTE; +import com.bigdata.rdf.internal.XSD; +import com.bigdata.rdf.model.BigdataLiteral; +import com.bigdata.rdf.model.BigdataURI; +import com.bigdata.rdf.model.BigdataValue; +import com.bigdata.rdf.model.BigdataValueFactory; +import com.bigdata.rdf.model.BigdataValueFactoryImpl; + +/** + * Return the language tag of the literal argument. + */ +public class LangBOp extends IVValueExpression<IV> + implements INeedsMaterialization { + + /** + * + */ + private static final long serialVersionUID = 7391999162162545704L; + + private static final transient Logger log = Logger.getLogger(LangBOp.class); + + + public interface Annotations extends BOp.Annotations { + + String NAMESPACE = (LangBOp.class.getName() + ".namespace").intern(); + + } + + public LangBOp(final IValueExpression<? extends IV> x, final String lex) { + + this(new BOp[] { x }, + NV.asMap(new NV(Annotations.NAMESPACE, lex))); + + } + + /** + * Required shallow copy constructor. + */ + public LangBOp(final BOp[] args, final Map<String, Object> anns) { + + super(args, anns); + + if (args.length != 1 || args[0] == null) + throw new IllegalArgumentException(); + + if (getProperty(Annotations.NAMESPACE) == null) + throw new IllegalArgumentException(); + + } + + /** + * Required deep copy constructor. 
+ */ + public LangBOp(final LangBOp op) { + super(op); + } + + public IV get(final IBindingSet bs) { + + final IV iv = get(0).get(bs); + + if (log.isDebugEnabled()) { + log.debug(iv); + } + + // not yet bound + if (iv == null) + throw new SparqlTypeErrorException(); + + final BigdataValue val = iv.getValue(); + + if (val == null) + throw new NotMaterializedException(); + + if (val instanceof BigdataLiteral) { + + final BigdataLiteral literal = (BigdataLiteral) val; + + String langTag = literal.getLanguage(); + if (langTag == null) { + langTag = ""; + } + + final String namespace = (String) + getRequiredProperty(Annotations.NAMESPACE); + + final BigdataValueFactory vf = + BigdataValueFactoryImpl.getInstance(namespace); + + final BigdataValue lang = vf.createLiteral(langTag); + + IV langIV = lang.getIV(); + + if (langIV == null) { + + langIV = new TermId(VTE.LITERAL, TermId.NULL); + lang.setIV(langIV); + + } + + // cache the value on the IV + langIV.setValue(lang); + + return langIV; + + } + + throw new SparqlTypeErrorException(); + + } + + private volatile transient Set<IVariable<IV>> terms; + + public Set<IVariable<IV>> getTermsToMaterialize() { + + if (terms == null) { + + terms = new LinkedHashSet<IVariable<IV>>(); + + for (BOp bop : args()) { + + if (bop instanceof IVariable) + terms.add((IVariable<IV>) bop); + + } + + } + + return terms; + + } + +} Added: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -0,0 +1,152 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.rdf.internal.constraints; + +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import org.apache.log4j.Logger; +import org.openrdf.model.Literal; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; +import com.bigdata.rdf.error.SparqlTypeErrorException; +import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.model.BigdataValue; + +/** + * Implements the langMatches SPARQL operator. 
+ */ +public class LangMatchesBOp extends XSDBooleanIVValueExpression + implements INeedsMaterialization { + + /** + * + */ + private static final long serialVersionUID = 5910711647357240974L; + + private static final transient Logger log = Logger.getLogger(LangMatchesBOp.class); + + + public LangMatchesBOp(final IValueExpression<? extends IV> tag, + final IValueExpression<? extends IV> range) { + + this(new BOp[] { tag, range }, null/*annocations*/); + + } + + /** + * Required shallow copy constructor. + */ + public LangMatchesBOp(final BOp[] args, final Map<String, Object> anns) { + + super(args, anns); + + if (args.length != 2 || args[0] == null || args[1] == null) + throw new IllegalArgumentException(); + + } + + /** + * Required deep copy constructor. + */ + public LangMatchesBOp(final LangMatchesBOp op) { + super(op); + } + + protected boolean accept(final IBindingSet bs) { + + final IV tag = get(0).get(bs); + final IV range = get(1).get(bs); + + if (log.isDebugEnabled()) { + log.debug(tag); + log.debug(range); + } + + // not yet bound + if (tag == null || range == null) + throw new SparqlTypeErrorException(); + + final BigdataValue tagVal = tag.getValue(); + final BigdataValue rangeVal = tag.getValue(); + + // not yet materialized + if (tagVal == null || rangeVal == null) + throw new NotMaterializedException(); + + if (QueryEvaluationUtil.isSimpleLiteral(tagVal) + && QueryEvaluationUtil.isSimpleLiteral(rangeVal)) + { + String langTag = ((Literal)tagVal).getLabel(); + String langRange = ((Literal)rangeVal).getLabel(); + + boolean result = false; + if (langRange.equals("*")) { + result = langTag.length() > 0; + } + else if (langTag.length() == langRange.length()) { + result = langTag.equalsIgnoreCase(langRange); + } + else if (langTag.length() > langRange.length()) { + // check if the range is a prefix of the tag + String prefix = langTag.substring(0, langRange.length()); + result = prefix.equalsIgnoreCase(langRange) && langTag.charAt(langRange.length()) == '-'; + } + + return result; + } + + throw new SparqlTypeErrorException(); + + } + + private volatile transient Set<IVariable<IV>> terms; + + public Set<IVariable<IV>> getTermsToMaterialize() { + + if (terms == null) { + + terms = new LinkedHashSet<IVariable<IV>>(); + + for (BOp bop : args()) { + + if (bop instanceof IVariable) + terms.add((IVariable<IV>) bop); + + } + + } + + return terms; + + } + +} Added: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/CacheValueFilter.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/CacheValueFilter.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/CacheValueFilter.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -0,0 +1,91 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2010. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on Sep 28, 2010 + */ + +package com.bigdata.rdf.lexicon; + +import java.util.Map; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.BOpBase; +import com.bigdata.bop.ap.filter.BOpResolver; +import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.model.BigdataValue; + +/** + * Cache the {@link BigdataValue} on the {@link IV} (create a cross linkage). + * This is useful for lexicon joins and SPARQL operators that need to use + * materialized RDF values. + */ +public class CacheValueFilter extends BOpResolver { + + /** + * + */ + private static final long serialVersionUID = -7267351719878117114L; + + /** + * A default instance. + */ + public static CacheValueFilter newInstance() { + return new CacheValueFilter(BOpBase.NOARGS, BOpBase.NOANNS); + } + + /** + * @param op + */ + public CacheValueFilter(CacheValueFilter op) { + super(op); + } + + /** + * @param args + * @param annotations + */ + public CacheValueFilter(BOp[] args, Map<String, Object> annotations) { + super(args, annotations); + } + + /** + * Cache the BigdataValue on its IV (cross-link). + */ + @Override + protected Object resolve(final Object obj) { + + final BigdataValue val = (BigdataValue) obj; + + // the link from BigdataValue to IV is pre-existing (set by the + // materialization of the index tuple) + final IV iv = val.getIV(); + + // cache the value on the IV + iv.setValue(val); + + return obj; + + } + +} Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyOrder.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyOrder.java 2011-06-06 12:36:01 UTC (rev 4629) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconKeyOrder.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -5,10 +5,11 @@ import org.openrdf.model.Value; +import com.bigdata.btree.keys.IKeyBuilder; +import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.model.BigdataValueIdComparator; import com.bigdata.striterator.AbstractKeyOrder; -import com.bigdata.striterator.IKeyOrder; /** * Natural index orders for the {@link LexiconRelation}. 
@@ -181,5 +182,26 @@ return LexiconKeyOrder.valueOf(index); } + + protected void appendKeyComponent(final IKeyBuilder keyBuilder, + final int i, final Object keyComponent) { + if (index == _TERM2ID) { + + final BigdataValue term = (BigdataValue) keyComponent; + final LexiconKeyBuilder lexKeyBuilder = + new LexiconKeyBuilder(keyBuilder); + lexKeyBuilder.value2Key(term); + + } else if (index == _ID2TERM) { + + final TermId id = (TermId) keyComponent; + id.encode(keyBuilder); + + } else { + throw new AssertionError(); + } + + } + } Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java 2011-06-06 12:36:01 UTC (rev 4629) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexiconRelation.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -59,6 +59,8 @@ import com.bigdata.bop.IBindingSet; import com.bigdata.bop.IPredicate; import com.bigdata.bop.IVariableOrConstant; +import com.bigdata.bop.ap.Predicate; +import com.bigdata.bop.ap.filter.BOpResolver; import com.bigdata.btree.BytesUtil; import com.bigdata.btree.IIndex; import com.bigdata.btree.IRangeQuery; @@ -98,9 +100,12 @@ import com.bigdata.rdf.model.BigdataValueFactoryImpl; import com.bigdata.rdf.rio.IStatementBuffer; import com.bigdata.rdf.rio.StatementBuffer; +import com.bigdata.rdf.spo.ISPO; +import com.bigdata.rdf.spo.SPO; import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.rdf.store.IRawTripleStore; import com.bigdata.relation.AbstractRelation; +import com.bigdata.relation.accesspath.AccessPath; import com.bigdata.relation.accesspath.ArrayAccessPath; import com.bigdata.relation.accesspath.IAccessPath; import com.bigdata.relation.accesspath.IElementFilter; @@ -2923,18 +2928,39 @@ } final BigdataValue val = term.get(); + + // see if it already has an IV or can be assigned an inline IV + IV iv = val.getIV(); + if (iv == null) { + iv = getInlineIV(val); + } + + if (iv != null) { + + // cache the IV on the value + val.setIV(iv); + + // cache the value on the IV + iv.setValue(val); + + return new ArrayAccessPath<BigdataValue>(new BigdataValue[] { val }, + predicate, keyOrder); + + } + + final CacheValueFilter filter = CacheValueFilter.newInstance(); - final IV iv = getIV(val); + final IPredicate<BigdataValue> tmp = (IPredicate<BigdataValue>) + predicate.setProperty( + Predicate.Annotations.ACCESS_PATH_FILTER, filter + ); - // cache the IV on the value - val.setIV(iv); + AccessPath<BigdataValue> ap = new AccessPath<BigdataValue>( + this, localIndexManager, tmp, keyOrder + ); - // cache the value on the IV - iv.setValue(val); + return ap; - return new ArrayAccessPath<BigdataValue>(new BigdataValue[] { val }, - predicate, keyOrder); - } else if (keyOrder == LexiconKeyOrder.ID2TERM) { final IVariableOrConstant<IV> term = predicate.get(1); @@ -2947,17 +2973,40 @@ final IV iv = term.get(); - final BigdataValue val = getTerm(iv); + final BigdataValue val = termCache.get(iv); - // cache the IV on the value - val.setIV(iv); + if (val != null) { - // cache the value on the IV - iv.setValue(val); + if (log.isDebugEnabled()) + log.debug("found term in the term cache: " + val); + + // cache the IV on the value + val.setIV(iv); + + // cache the value on the IV + iv.setValue(val); + + return new ArrayAccessPath<BigdataValue>(new BigdataValue[] { val }, + predicate, keyOrder); + + } - return new 
ArrayAccessPath<BigdataValue>(new BigdataValue[] { val }, - predicate, keyOrder); + if (log.isDebugEnabled()) + log.debug("did not find term in the term cache: " + iv); + final CacheValueFilter filter = CacheValueFilter.newInstance(); + + final IPredicate<BigdataValue> tmp = (IPredicate<BigdataValue>) + predicate.setProperty( + Predicate.Annotations.ACCESS_PATH_FILTER, filter + ); + + final AccessPath<BigdataValue> ap = new AccessPath<BigdataValue>( + this, localIndexManager, tmp, keyOrder + ).init(); + + return ap; + } else { throw new IllegalArgumentException(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-06-06 12:36:01 UTC (rev 4629) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -30,6 +30,7 @@ import org.openrdf.query.algebra.BNodeGenerator; import org.openrdf.query.algebra.Bound; import org.openrdf.query.algebra.Compare; +import org.openrdf.query.algebra.Compare.CompareOp; import org.openrdf.query.algebra.CompareAll; import org.openrdf.query.algebra.CompareAny; import org.openrdf.query.algebra.Datatype; @@ -63,6 +64,7 @@ import org.openrdf.query.algebra.Regex; import org.openrdf.query.algebra.SameTerm; import org.openrdf.query.algebra.StatementPattern; +import org.openrdf.query.algebra.StatementPattern.Scope; import org.openrdf.query.algebra.Str; import org.openrdf.query.algebra.TupleExpr; import org.openrdf.query.algebra.UnaryTupleOperator; @@ -70,8 +72,6 @@ import org.openrdf.query.algebra.ValueConstant; import org.openrdf.query.algebra.ValueExpr; import org.openrdf.query.algebra.Var; -import org.openrdf.query.algebra.Compare.CompareOp; -import org.openrdf.query.algebra.StatementPattern.Scope; import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl; import org.openrdf.query.algebra.evaluation.iterator.FilterIterator; import org.openrdf.query.algebra.helpers.QueryModelVisitorBase; @@ -83,12 +83,12 @@ import com.bigdata.bop.IConstant; import com.bigdata.bop.IConstraint; import com.bigdata.bop.IPredicate; +import com.bigdata.bop.IPredicate.Annotations; import com.bigdata.bop.IValueExpression; import com.bigdata.bop.IVariable; import com.bigdata.bop.IVariableOrConstant; import com.bigdata.bop.NV; import com.bigdata.bop.PipelineOp; -import com.bigdata.bop.IPredicate.Annotations; import com.bigdata.bop.ap.Predicate; import com.bigdata.bop.bindingSet.ListBindingSet; import com.bigdata.bop.constraint.INBinarySearch; @@ -103,12 +103,17 @@ import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.constraints.AndBOp; import com.bigdata.rdf.internal.constraints.CompareBOp; +import com.bigdata.rdf.internal.constraints.DatatypeBOp; import com.bigdata.rdf.internal.constraints.EBVBOp; +import com.bigdata.rdf.internal.constraints.FuncBOp; import com.bigdata.rdf.internal.constraints.IsBNodeBOp; import com.bigdata.rdf.internal.constraints.IsBoundBOp; import com.bigdata.rdf.internal.constraints.IsLiteralBOp; import com.bigdata.rdf.internal.constraints.IsURIBOp; +import com.bigdata.rdf.internal.constraints.LangBOp; +import com.bigdata.rdf.internal.constraints.LangMatchesBOp; import com.bigdata.rdf.internal.constraints.MathBOp; +import com.bigdata.rdf.internal.constraints.MathBOp.MathOp; import 
com.bigdata.rdf.internal.constraints.NotBOp; import com.bigdata.rdf.internal.constraints.OrBOp; import com.bigdata.rdf.internal.constraints.RangeBOp; @@ -116,16 +121,15 @@ import com.bigdata.rdf.internal.constraints.SPARQLConstraint; import com.bigdata.rdf.internal.constraints.SameTermBOp; import com.bigdata.rdf.internal.constraints.StrBOp; -import com.bigdata.rdf.internal.constraints.MathBOp.MathOp; import com.bigdata.rdf.lexicon.LexiconRelation; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.sail.BigdataSail.Options; import com.bigdata.rdf.sail.sop.SOp; import com.bigdata.rdf.sail.sop.SOp2BOpUtility; import com.bigdata.rdf.sail.sop.SOpTree; +import com.bigdata.rdf.sail.sop.SOpTree.SOpGroup; import com.bigdata.rdf.sail.sop.SOpTreeBuilder; import com.bigdata.rdf.sail.sop.UnsupportedOperatorException; -import com.bigdata.rdf.sail.sop.SOpTree.SOpGroup; import com.bigdata.rdf.spo.DefaultGraphSolutionExpander; import com.bigdata.rdf.spo.ExplicitSPOFilter; import com.bigdata.rdf.spo.ISPO; @@ -777,49 +781,37 @@ * UnsupportedOperatorException here must just flow through * to Sesame evaluation of the entire query. */ -// if (op instanceof Regex) { -// final Regex regex = (Regex) op; -// final IPredicate bop = toPredicate(regex); -// sop.setBOp(bop); -// } else { - final ValueExpr ve = (ValueExpr) op; - final IConstraint bop = toConstraint(ve); - sop.setBOp(bop); -// } + final ValueExpr ve = (ValueExpr) op; + final IConstraint bop = toConstraint(ve); + sop.setBOp(bop); } else if (op instanceof Filter) { final Filter filter = (Filter) op; final ValueExpr ve = filter.getCondition(); - try { -// if (ve instanceof Regex) { -// final Regex regex = (Regex) ve; -// final IPredicate bop = toPredicate(regex); -// sop.setBOp(bop); -// } else { - final IConstraint bop = toConstraint(ve); - sop.setBOp(bop); -// } - } catch (UnsupportedOperatorException ex) { - /* - * If we encounter a sesame filter (ValueExpr) that we - * cannot translate, we can safely wrap the entire query - * with a Sesame filter iterator to capture that - * untranslatable value expression. If we are not in the - * root group however, we risk applying the filter to the - * wrong context (for example a filter inside an optional - * join group cannot be applied universally to the entire - * solution). In this case we must punt. - */ - if (sop.getGroup() == SOpTreeBuilder.ROOT_GROUP_ID) { - sopsToPrune.add(sop); - sesameFilters.add(filter); - } else { - /* - * Note: DO NOT wrap with a different exception type - - * the caller is looking for this. - */ - throw new UnsupportedOperatorException(ex); - } - } +// try { + final IConstraint bop = toConstraint(ve); + sop.setBOp(bop); +// } catch (UnsupportedOperatorException ex) { +// /* +// * If we encounter a sesame filter (ValueExpr) that we +// * cannot translate, we can safely wrap the entire query +// * with a Sesame filter iterator to capture that +// * untranslatable value expression. If we are not in the +// * root group however, we risk applying the filter to the +// * wrong context (for example a filter inside an optional +// * join group cannot be applied universally to the entire +// * solution). In this case we must punt. +// */ +// if (sop.getGroup() == SOpTreeBuilder.ROOT_GROUP_ID) { +// sopsToPrune.add(sop); +// sesameFilters.add(filter); +// } else { +// /* +// * Note: DO NOT wrap with a different exception type - +// * the caller is looking for this. 
+// */ +// throw new UnsupportedOperatorException(ex); +// } +// } } } @@ -1832,11 +1824,11 @@ } else if (ve instanceof Label) { throw new UnsupportedOperatorException(ve); } else if (ve instanceof Lang) { - throw new UnsupportedOperatorException(ve); + return toVE((Lang) ve); } else if (ve instanceof LangMatches) { - throw new UnsupportedOperatorException(ve); + return toVE((LangMatches) ve); } else if (ve instanceof Datatype) { - throw new UnsupportedOperatorException(ve); + return toVE((Datatype) ve); } else if (ve instanceof Namespace) { throw new UnsupportedOperatorException(ve); } else if (ve instanceof LocalName) { @@ -1854,7 +1846,7 @@ } else if (ve instanceof Like) { throw new UnsupportedOperatorException(ve); } else if (ve instanceof FunctionCall) { - throw new UnsupportedOperatorException(ve); + return toVE((FunctionCall) ve); } else if (ve instanceof And) { return toVE((And) ve); } else if (ve instanceof Or) { @@ -2014,6 +2006,36 @@ } } + private IValueExpression<? extends IV> toVE(final FunctionCall fc) { + final String lex = database.getLexiconRelation().getNamespace(); + final String func = fc.getURI(); + final List<ValueExpr> args = fc.getArgs(); + final IValueExpression<? extends IV>[] bops = + new IValueExpression[args.size()]; + for (int i = 0; i < bops.length; i++) { + bops[i] = toVE(args.get(i)); + } + return new FuncBOp(bops, func, lex); + } + + private IValueExpression<? extends IV> toVE(final Datatype dt) { + final String lex = database.getLexiconRelation().getNamespace(); + final IValueExpression<? extends IV> arg = toVE(dt.getArg()); + return new DatatypeBOp(arg, lex); + } + + private IValueExpression<? extends IV> toVE(final Lang lang) { + final String lex = database.getLexiconRelation().getNamespace(); + final IValueExpression<? extends IV> arg = toVE(lang.getArg()); + return new LangBOp(arg, lex); + } + + private IValueExpression<? extends IV> toVE(final LangMatches lm) { + final IValueExpression<? extends IV> tag = toVE(lm.getLeftArg()); + final IValueExpression<? extends IV> range = toVE(lm.getRightArg()); + return new LangMatchesBOp(tag, range); + } + /** * Generate a bigdata term from a Sesame term. * <p> Added: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java 2011-06-06 14:11:22 UTC (rev 4630) @@ -0,0 +1,570 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2007. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ + +package com.bigdata.rdf.sail; + +import java.util.Properties; + +import org.apache.log4j.Logger; +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.ValueFactory; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.QueryLanguage; +import org.openrdf.query.TupleQueryResult; +import org.openrdf.repository.Repository; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.sail.SailRepository; +import org.openrdf.repository.sail.SailTupleQuery; +import org.openrdf.sail.Sail; +import org.openrdf.sail.memory.MemoryStore; + +import com.bigdata.rdf.axioms.NoAxioms; +import com.bigdata.rdf.internal.XSD; +import com.bigdata.rdf.store.BD; +import com.bigdata.rdf.vocab.NoVocabulary; + +public class TestLexJoinOps extends QuadsTestCase { + + protected static final Logger log = Logger.getLogger(TestLexJoinOps.class); + + protected static final boolean INFO = log.isInfoEnabled(); + + @Override + public Properties getProperties() { + + Properties props = super.getProperties(); + + props.setProperty(BigdataSail.Options.AXIOMS_CLASS, NoAxioms.class.getName()); + props.setProperty(BigdataSail.Options.VOCABULARY_CLASS, NoVocabulary.class.getName()); + props.setProperty(BigdataSail.Options.TRUTH_MAINTENANCE, "false"); + props.setProperty(BigdataSail.Options.JUSTIFY, "false"); + props.setProperty(BigdataSail.Options.TEXT_INDEX, "false"); + + return props; + + } + + /** + * + */ + public TestLexJoinOps() { + } + + /** + * @param arg0 + */ + public TestLexJoinOps(String arg0) { + super(arg0); + } + + public void testStr() throws Exception { + +// final Sail sail = new MemoryStore(); +// try { +// sail.initialize(); +// final Repository repo = new SailRepository(sail); + + final BigdataSail sail = getSail(); + try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + + final RepositoryConnection cxn = repo.getConnection(); + + try { + cxn.setAutoCommit(false); + + final ValueFactory vf = sail.getValueFactory(); + + /* + * Create some terms. + */ + final URI X = vf.createURI(BD.NAMESPACE + "X"); + final URI dt = vf.createURI(BD.NAMESPACE + "myDatatype"); + final Literal _1 = vf.createLiteral("foo"); + final Literal _2 = vf.createLiteral("foo", XSD.STRING); + final Literal _3 = vf.createLiteral("foo", dt); + final Literal _4 = vf.createLiteral("foo", "EN"); + final Literal _5 = vf.createLiteral(true); + final Literal _6 = vf.createLiteral(1000l); + + /* + * Create some statements. + */ + cxn.add(X, RDF.TYPE, RDFS.RESOURCE); +// cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _2); + cxn.add(X, RDFS.LABEL, _3); + cxn.add(X, RDFS.LABEL, _4); + cxn.add(X, RDFS.LABEL, _5); + cxn.add(X, RDFS.LABEL, _6); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.commit(); + + if (log.isInfoEnabled()) { + log.info(sail.getDatabase().dumpStore()); + } + + { + + String query = + QueryOptimizerEnum.queryHint(QueryOptimizerEnum.None) + + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + + "select ?p ?o " + + "where { " + + " ?s rdf:type rdfs:Resource . 
" + +// " ?s ?p \"foo\" . " + + " ?s ?p ?o . " + +// " filter(str(?o) = \"foo\" && regex(str(?o),\"foo\",\"i\")) " + +// " filter(?o = \"foo\") " + + " filter(str(?o) = \"foo\") " + + " filter(str(?p) = \""+RDFS.LABEL+"\") " + + "}"; + + final SailTupleQuery tupleQuery = (SailTupleQuery) + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(false /* includeInferred */); + + if (log.isInfoEnabled()) { + + log.info(query); + +// final BigdataSailTupleQuery bdTupleQuery = +// (BigdataSailTupleQuery) tupleQuery; +// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); +// final Projection p = (Projection) root.getArg(); +// final TupleExpr tupleExpr = p.getArg(); +// final SOpTreeBuilder stb = new SOpTreeBuilder(); +// final SOpTree tree = stb.collectSOps(tupleExpr); + +// log.info(tree); +// log.info(query); + + final TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + log.info(result.next()); + } + + } + +// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); +// answer.add(createBindingSet( +// new BindingImpl("a", paul), +// new BindingImpl("b", mary) +// )); +// answer.add(createBindingSet( +// new BindingImpl("a", brad), +// new BindingImpl("b", john) +// )); +// +// final TupleQueryResult result = tupleQuery.evaluate(); +// compare(result, answer); + + } + + } finally { + cxn.close(); + } + } finally { + if (sail instanceof BigdataSail) + ((BigdataSail)sail).__tearDownUnitTest();//shutDown(); + } + + } + + public void testRegex() throws Exception { + +// final Sail sail = new MemoryStore(); +// try { +// sail.initialize(); +// final Repository repo = new SailRepository(sail); + + final BigdataSail sail = getSail(); + try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + + final RepositoryConnection cxn = repo.getConnection(); + + try { + cxn.setAutoCommit(false); + + final ValueFactory vf = sail.getValueFactory(); + + /* + * Create some terms. + */ + final URI X = vf.createURI(BD.NAMESPACE + "X"); + final URI dt = vf.createURI(BD.NAMESPACE + "myDatatype"); + final Literal _1 = vf.createLiteral("foo"); + final Literal _2 = vf.createLiteral("foo", XSD.STRING); + final Literal _3 = vf.createLiteral("foo", dt); + final Literal _4 = vf.createLiteral("foo", "EN"); + final Literal _5 = vf.createLiteral(true); + final Literal _6 = vf.createLiteral(1000l); + + /* + * Create some statements. + */ + cxn.add(X, RDF.TYPE, RDFS.RESOURCE); +// cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _2); + cxn.add(X, RDFS.LABEL, _3); + cxn.add(X, RDFS.LABEL, _4); + cxn.add(X, RDFS.LABEL, _5); + cxn.add(X, RDFS.LABEL, _6); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.commit(); + + if (log.isInfoEnabled()) { + log.info(sail.getDatabase().dumpStore()); + } + + { + + String query = + QueryOptimizerEnum.queryHint(QueryOptimizerEnum.None) + + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + + "select ?o " + + "where { " + + " ?s rdf:type rdfs:Resource . " + + " ?s ?p ?o . 
" + +// " filter(regex(str(?o), \"FOO\")) " + + " filter(regex(str(?o), \"FOO\", \"i\")) " + + "}"; + + final SailTupleQuery tupleQuery = (SailTupleQuery) + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(false /* includeInferred */); + + if (log.isInfoEnabled()) { + + log.info(query); + +// final BigdataSailTupleQuery bdTupleQuery = +// (BigdataSailTupleQuery) tupleQuery; +// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); +// final Projection p = (Projection) root.getArg(); +// final TupleExpr tupleExpr = p.getArg(); +// final SOpTreeBuilder stb = new SOpTreeBuilder(); +// final SOpTree tree = stb.collectSOps(tupleExpr); + +// log.info(tree); +// log.info(query); + + final TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + log.info(result.next()); + } + + } + +// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); +// answer.add(createBindingSet( +// new BindingImpl("a", paul), +// new BindingImpl("b", mary) +// )); +// answer.add(createBindingSet( +// new BindingImpl("a", brad), +// new BindingImpl("b", john) +// )); +// +// final TupleQueryResult result = tupleQuery.evaluate(); +// compare(result, answer); + + } + + } finally { + cxn.close(); + } + } finally { + if (sail instanceof BigdataSail) + ((BigdataSail)sail).__tearDownUnitTest();//shutDown(); + } + + } + + /* + * PREFIX : <http://example.org/> +PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> +PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> +SELECT ?s WHERE { + ?s :p ?v . + FILTER(datatype(xsd:boolean(?v)) = xsd:boolean) . +} + */ + + public void testCastAndDatatype() throws Exception { + +// final Sail sail = new MemoryStore(); +// try { +// sail.initialize(); +// final Repository repo = new SailRepository(sail); + + final BigdataSail sail = getSail(); + try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + + final RepositoryConnection cxn = repo.getConnection(); + + try { + cxn.setAutoCommit(false); + + final ValueFactory vf = sail.getValueFactory(); + + /* + * Create some terms. + */ + final URI X = vf.createURI(BD.NAMESPACE + "X"); + final URI dt = vf.createURI(BD.NAMESPACE + "myDatatype"); + final Literal _1 = vf.createLiteral("foo"); + final Literal _2 = vf.createLiteral("foo", XSD.STRING); + final Literal _3 = vf.createLiteral("foo", dt); + final Literal _4 = vf.createLiteral("foo", "EN"); + final Literal _5 = vf.createLiteral(true); + final Literal _6 = vf.createLiteral(1000l); + + /* + * Create some statements. + */ + cxn.add(X, RDF.TYPE, RDFS.RESOURCE); +// cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _2); + cxn.add(X, RDFS.LABEL, _3); + cxn.add(X, RDFS.LABEL, _4); + cxn.add(X, RDFS.LABEL, _5); + cxn.add(X, RDFS.LABEL, _6); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.commit(); + + if (log.isInfoEnabled()) { + log.info(sail.getDatabase().dumpStore()); + } + + { + + String query = + QueryOptimizerEnum.queryHint(QueryOptimizerEnum.None) + + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> " + + + "select ?o " + + "where { " + + " ?s rdf:type rdfs:Resource . " + + " ?s ?p ?o . " + + " FILTER(datatype(xsd:boolean(?o)) = xsd:boolean) . 
" + + "}"; + + final SailTupleQuery tupleQuery = (SailTupleQuery) + cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); + tupleQuery.setIncludeInferred(false /* includeInferred */); + + if (log.isInfoEnabled()) { + + log.info(query); + +// final BigdataSailTupleQuery bdTupleQuery = +// (BigdataSailTupleQuery) tupleQuery; +// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); +// final Projection p = (Projection) root.getArg(); +// final TupleExpr tupleExpr = p.getArg(); +// final SOpTreeBuilder stb = new SOpTreeBuilder(); +// final SOpTree tree = stb.collectSOps(tupleExpr); + +// log.info(tree); +// log.info(query); + + final TupleQueryResult result = tupleQuery.evaluate(); + while (result.hasNext()) { + log.info(result.next()); + } + + } + +// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); +// answer.add(createBindingSet( +// new BindingImpl("a", paul), +// new BindingImpl("b", mary) +// )); +// answer.add(createBindingSet( +// new BindingImpl("a", brad), +// new BindingImpl("b", john) +// )); +// +// final TupleQueryResult result = tupleQuery.evaluate(); +// compare(result, answer); + + } + + } finally { + cxn.close(); + } + } finally { + if (sail instanceof BigdataSail) + ((BigdataSail)sail).__tearDownUnitTest();//shutDown(); + } + + } + + public void testLang() throws Exception { + +// final Sail sail = new MemoryStore(); +// try { +// sail.initialize(); +// final Repository repo = new SailRepository(sail); + + final BigdataSail sail = getSail(); + try { + sail.initialize(); + final BigdataSailRepository repo = new BigdataSailRepository(sail); + + final RepositoryConnection cxn = repo.getConnection(); + + try { + cxn.setAutoCommit(false); + + final ValueFactory vf = sail.getValueFactory(); + + /* + * Create some terms. + */ + final URI X = vf.createURI(BD.NAMESPACE + "X"); + final URI Y = vf.createURI(BD.NAMESPACE + "Y"); + final Literal _1 = vf.createLiteral("That Seventies Show","en"); + final Literal _2 = vf.createLiteral("Cette S\x8Erie des Ann\x8Ees Soixante-dix","fr"); + final Literal _3 = vf.createLiteral("Cette S\x8Erie des Ann\x8Ees Septante","fr-BE"); + final Literal _4 = vf.createLiteral("Il Buono, il Bruto, il Cattivo"); + + /* + * Create some statements. + */ + cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _2); + cxn.add(X, RDFS.LABEL, _3); + cxn.add(Y, RDFS.LABEL, _4); + + /* + * Note: The either flush() or commit() is required to flush the + * statement buffers to the database before executing any operations + * that go around the sail. + */ + cxn.commit(); + + if (log.isInfoEnabled()) { + log.info(sail.getDatabase().dumpStore()); + } + + { + + String query = + QueryOptimizerEnum.queryHint(QueryOptimizerEnum.None) + + "prefix bd: <"+BD.NAMESPACE+"> " + + "prefix rdf: <"+RDF.NAMESPACE+"> " + + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> " + + + "select ?title " + + "where { " + + " ?s rdfs:label \"That Seventies Show\"@en . " + + " ?s rdfs:label ?title . " ... [truncated message content] |
From: <mrp...@us...> - 2011-06-09 20:25:11
Revision: 4677 http://bigdata.svn.sourceforge.net/bigdata/?rev=4677&view=rev Author: mrpersonick Date: 2011-06-09 20:25:05 +0000 (Thu, 09 Jun 2011) Log Message: ----------- working through unit test failures Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -113,6 +113,10 @@ if (val == null) throw new NotMaterializedException(); + if (log.isDebugEnabled()) { + log.debug(val); + } + if (val instanceof BigdataLiteral) { final BigdataLiteral literal = (BigdataLiteral) val; Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -60,6 +60,11 @@ this(new BOp[] { tag, range }, null/*annocations*/); + if (log.isDebugEnabled()) { + log.info(tag); + log.info(range); + } + } /** @@ -96,8 +101,13 @@ throw new SparqlTypeErrorException(); final BigdataValue tagVal = tag.getValue(); - final BigdataValue rangeVal = tag.getValue(); + final BigdataValue rangeVal = range.getValue(); + if (log.isDebugEnabled()) { + log.debug(tagVal); + log.debug(rangeVal); + } + // not yet materialized if (tagVal == null || rangeVal == null) throw new NotMaterializedException(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailEmbeddedFederationWithQuads.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -122,6 +122,8 @@ suite.addTestSuite(com.bigdata.rdf.sail.DavidsTestBOps.class); + suite.addTestSuite(com.bigdata.rdf.sail.TestLexJoinOps.class); + // The Sesame TCK, including the SPARQL test suite. 
{ Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithQuads.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -121,6 +121,8 @@ suite.addTestSuite(com.bigdata.rdf.sail.DavidsTestBOps.class); + suite.addTestSuite(com.bigdata.rdf.sail.TestLexJoinOps.class); + // The Sesame TCK, including the SPARQL test suite. { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithSids.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -101,6 +101,8 @@ suite.addTestSuite(com.bigdata.rdf.sail.TestTicket275.class); suite.addTestSuite(com.bigdata.rdf.sail.TestTicket276.class); + suite.addTestSuite(com.bigdata.rdf.sail.TestLexJoinOps.class); + return suite; } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestBigdataSailWithoutSids.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -95,6 +95,8 @@ suite.addTestSuite(com.bigdata.rdf.sail.TestTicket275.class); suite.addTestSuite(com.bigdata.rdf.sail.TestTicket276.class); + suite.addTestSuite(com.bigdata.rdf.sail.TestLexJoinOps.class); + return suite; } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java 2011-06-09 20:05:26 UTC (rev 4676) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/TestLexJoinOps.java 2011-06-09 20:25:05 UTC (rev 4677) @@ -23,6 +23,8 @@ package com.bigdata.rdf.sail; +import java.util.Collection; +import java.util.LinkedList; import java.util.Properties; import org.apache.log4j.Logger; @@ -31,14 +33,12 @@ import org.openrdf.model.ValueFactory; import org.openrdf.model.vocabulary.RDF; import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.BindingSet; import org.openrdf.query.QueryLanguage; import org.openrdf.query.TupleQueryResult; -import org.openrdf.repository.Repository; +import org.openrdf.query.impl.BindingImpl; import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.sail.SailRepository; import org.openrdf.repository.sail.SailTupleQuery; -import org.openrdf.sail.Sail; -import org.openrdf.sail.memory.MemoryStore; import com.bigdata.rdf.axioms.NoAxioms; import com.bigdata.rdf.internal.XSD; @@ -114,7 +114,7 @@ * Create some statements. 
*/ cxn.add(X, RDF.TYPE, RDFS.RESOURCE); -// cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _1); cxn.add(X, RDFS.LABEL, _2); cxn.add(X, RDFS.LABEL, _3); cxn.add(X, RDFS.LABEL, _4); @@ -140,15 +140,11 @@ "prefix rdf: <"+RDF.NAMESPACE+"> " + "prefix rdfs: <"+RDFS.NAMESPACE+"> " + - "select ?p ?o " + + "select ?o " + "where { " + " ?s rdf:type rdfs:Resource . " + -// " ?s ?p \"foo\" . " + " ?s ?p ?o . " + -// " filter(str(?o) = \"foo\" && regex(str(?o),\"foo\",\"i\")) " + -// " filter(?o = \"foo\") " + " filter(str(?o) = \"foo\") " + - " filter(str(?p) = \""+RDFS.LABEL+"\") " + "}"; final SailTupleQuery tupleQuery = (SailTupleQuery) @@ -159,17 +155,6 @@ log.info(query); -// final BigdataSailTupleQuery bdTupleQuery = -// (BigdataSailTupleQuery) tupleQuery; -// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); -// final Projection p = (Projection) root.getArg(); -// final TupleExpr tupleExpr = p.getArg(); -// final SOpTreeBuilder stb = new SOpTreeBuilder(); -// final SOpTree tree = stb.collectSOps(tupleExpr); - -// log.info(tree); -// log.info(query); - final TupleQueryResult result = tupleQuery.evaluate(); while (result.hasNext()) { log.info(result.next()); @@ -177,19 +162,23 @@ } -// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); -// answer.add(createBindingSet( -// new BindingImpl("a", paul), -// new BindingImpl("b", mary) -// )); -// answer.add(createBindingSet( -// new BindingImpl("a", brad), -// new BindingImpl("b", john) -// )); -// -// final TupleQueryResult result = tupleQuery.evaluate(); -// compare(result, answer); + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("o", _1) + )); + answer.add(createBindingSet( + new BindingImpl("o", _2) + )); + answer.add(createBindingSet( + new BindingImpl("o", _3) + )); + answer.add(createBindingSet( + new BindingImpl("o", _4) + )); + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + } } finally { @@ -237,7 +226,7 @@ * Create some statements. 
*/ cxn.add(X, RDF.TYPE, RDFS.RESOURCE); -// cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _1); cxn.add(X, RDFS.LABEL, _2); cxn.add(X, RDFS.LABEL, _3); cxn.add(X, RDFS.LABEL, _4); @@ -279,17 +268,6 @@ log.info(query); -// final BigdataSailTupleQuery bdTupleQuery = -// (BigdataSailTupleQuery) tupleQuery; -// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); -// final Projection p = (Projection) root.getArg(); -// final TupleExpr tupleExpr = p.getArg(); -// final SOpTreeBuilder stb = new SOpTreeBuilder(); -// final SOpTree tree = stb.collectSOps(tupleExpr); - -// log.info(tree); -// log.info(query); - final TupleQueryResult result = tupleQuery.evaluate(); while (result.hasNext()) { log.info(result.next()); @@ -297,19 +275,23 @@ } -// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); -// answer.add(createBindingSet( -// new BindingImpl("a", paul), -// new BindingImpl("b", mary) -// )); -// answer.add(createBindingSet( -// new BindingImpl("a", brad), -// new BindingImpl("b", john) -// )); -// -// final TupleQueryResult result = tupleQuery.evaluate(); -// compare(result, answer); + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("o", _1) + )); + answer.add(createBindingSet( + new BindingImpl("o", _2) + )); + answer.add(createBindingSet( + new BindingImpl("o", _3) + )); + answer.add(createBindingSet( + new BindingImpl("o", _4) + )); + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + } } finally { @@ -367,7 +349,7 @@ * Create some statements. */ cxn.add(X, RDF.TYPE, RDFS.RESOURCE); -// cxn.add(X, RDFS.LABEL, _1); + cxn.add(X, RDFS.LABEL, _1); cxn.add(X, RDFS.LABEL, _2); cxn.add(X, RDFS.LABEL, _3); cxn.add(X, RDFS.LABEL, _4); @@ -409,17 +391,6 @@ log.info(query); -// final BigdataSailTupleQuery bdTupleQuery = -// (BigdataSailTupleQuery) tupleQuery; -// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); -// final Projection p = (Projection) root.getArg(); -// final TupleExpr tupleExpr = p.getArg(); -// final SOpTreeBuilder stb = new SOpTreeBuilder(); -// final SOpTree tree = stb.collectSOps(tupleExpr); - -// log.info(tree); -// log.info(query); - final TupleQueryResult result = tupleQuery.evaluate(); while (result.hasNext()) { log.info(result.next()); @@ -427,19 +398,17 @@ } -// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); -// answer.add(createBindingSet( -// new BindingImpl("a", paul), -// new BindingImpl("b", mary) -// )); -// answer.add(createBindingSet( -// new BindingImpl("a", brad), -// new BindingImpl("b", john) -// )); -// -// final TupleQueryResult result = tupleQuery.evaluate(); -// compare(result, answer); + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("o", _5) + )); + answer.add(createBindingSet( + new BindingImpl("o", _6) + )); + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + } } finally { @@ -524,17 +493,6 @@ log.info(query); -// final BigdataSailTupleQuery bdTupleQuery = -// (BigdataSailTupleQuery) tupleQuery; -// final QueryRoot root = (QueryRoot) bdTupleQuery.getTupleExpr(); -// final Projection p = (Projection) root.getArg(); -// final TupleExpr tupleExpr = p.getArg(); -// final SOpTreeBuilder stb = new SOpTreeBuilder(); -// final SOpTree tree = stb.collectSOps(tupleExpr); - -// log.info(tree); -// log.info(query); - final TupleQueryResult result = tupleQuery.evaluate(); while (result.hasNext()) { 
log.info(result.next()); @@ -542,19 +500,17 @@ } -// final Collection<BindingSet> answer = new LinkedList<BindingSet>(); -// answer.add(createBindingSet( -// new BindingImpl("a", paul), -// new BindingImpl("b", mary) -// )); -// answer.add(createBindingSet( -// new BindingImpl("a", brad), -// new BindingImpl("b", john) -// )); -// -// final TupleQueryResult result = tupleQuery.evaluate(); -// compare(result, answer); + final Collection<BindingSet> answer = new LinkedList<BindingSet>(); + answer.add(createBindingSet( + new BindingImpl("title", _2) + )); + answer.add(createBindingSet( + new BindingImpl("title", _3) + )); + final TupleQueryResult result = tupleQuery.evaluate(); + compare(result, answer); + } } finally { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
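
Besides the LangBOp/LangMatchesBOp fixes in this commit (rangeVal is now taken from range rather than tag), TestLexJoinOps is wired into the four Sail test suites. While chasing the remaining failures, the same JUnit 3 pattern can be used to assemble a one-class suite; the wrapper class below is illustrative and not part of the commit:

    import junit.framework.Test;
    import junit.framework.TestSuite;

    public class RunLexJoinOpsOnly {

        // Mirrors the suite.addTestSuite(...) calls added above, but for a
        // single test class.
        public static Test suite() {
            final TestSuite suite = new TestSuite("lexicon join operators");
            suite.addTestSuite(com.bigdata.rdf.sail.TestLexJoinOps.class);
            return suite;
        }

        public static void main(final String[] args) {
            junit.textui.TestRunner.run(suite());
        }
    }

The project's own suites (TestBigdataSailWithQuads and friends) remain the canonical entry points; this only narrows the run to the new tests.
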
From: <mrp...@us...> - 2011-06-10 17:15:00
Revision: 4690 http://bigdata.svn.sourceforge.net/bigdata/?rev=4690&view=rev Author: mrpersonick Date: 2011-06-10 17:14:53 +0000 (Fri, 10 Jun 2011) Log Message: ----------- Fixed the ebv bop to conform with sparql spec Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java 2011-06-10 16:58:53 UTC (rev 4689) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java 2011-06-10 17:14:53 UTC (rev 4690) @@ -24,20 +24,28 @@ */ package com.bigdata.rdf.internal.constraints; +import java.util.LinkedHashSet; import java.util.Map; +import java.util.Set; +import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException; +import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil; + import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.XSDBooleanIV; +import com.bigdata.rdf.model.BigdataValue; /** * Calculates the "effective boolean value" of an IValueExpression. See the * SPARQL spec for details. */ -public class EBVBOp extends XSDBooleanIVValueExpression { +public class EBVBOp extends XSDBooleanIVValueExpression + implements INeedsMaterialization { /** * @@ -106,12 +114,50 @@ final IV iv = get(0).get(bs); + // not yet bound + if (iv == null) { + + throw new SparqlTypeErrorException(); + + } + if (iv instanceof XSDBooleanIV) { return ((XSDBooleanIV) iv).booleanValue(); } - throw new SparqlTypeErrorException(); - + final BigdataValue val = iv.getValue(); + + try { + + return QueryEvaluationUtil.getEffectiveBooleanValue(val); + + } catch (ValueExprEvaluationException ex) { + + throw new SparqlTypeErrorException(); + + } + } + private volatile transient Set<IVariable<IV>> terms; + + public Set<IVariable<IV>> getTermsToMaterialize() { + + if (terms == null) { + + terms = new LinkedHashSet<IVariable<IV>>(); + + for (BOp bop : args()) { + + if (bop instanceof IVariable) + terms.add((IVariable<IV>) bop); + + } + + } + + return terms; + + } + } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java 2011-06-10 16:58:53 UTC (rev 4689) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/test/com/bigdata/rdf/sail/tck/BigdataSparqlTest.java 2011-06-10 17:14:53 UTC (rev 4690) @@ -211,6 +211,13 @@ // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/type-promotion/manifest#type-promotion-29", // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/type-promotion/manifest#type-promotion-30", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest#dawg-bev-1", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest#dawg-bev-2", +// 
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest#dawg-bev-3", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest#dawg-bev-4", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest#dawg-bev-5", +// "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest#dawg-bev-6", + // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-eq", // "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-01", This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2011-06-15 19:35:18
Revision: 4711 http://bigdata.svn.sourceforge.net/bigdata/?rev=4711&view=rev Author: thompsonbry Date: 2011-06-15 19:35:12 +0000 (Wed, 15 Jun 2011) Log Message: ----------- Modified build.xml to bundle the web app resources in the bigdata jar. This is per [1]. Some minor fixes to DumpJournal. These fixes are already in the TERMS branch. [1] http://sourceforge.net/apps/trac/bigdata/ticket/330 Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/journal/DumpJournal.java branches/QUADS_QUERY_BRANCH/build.xml Modified: branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/journal/DumpJournal.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/journal/DumpJournal.java 2011-06-15 16:25:39 UTC (rev 4710) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/journal/DumpJournal.java 2011-06-15 19:35:12 UTC (rev 4711) @@ -266,7 +266,7 @@ final CommitRecordIndex commitRecordIndex = journal .getCommitRecordIndex(); - System.err.println("There are " + commitRecordIndex + System.out.println("There are " + commitRecordIndex.getEntryCount() + " commit points."); if (dumpHistory) { @@ -452,6 +452,8 @@ System.out.print('\t'); System.out.print("nleaves"); System.out.print('\t'); + System.out.print("nentries"); + System.out.print('\t'); System.out.print("nodeBytes"); System.out.print('\t'); System.out.print("leafBytes"); @@ -489,6 +491,8 @@ System.out.print('\t'); System.out.print(ndx.getLeafCount()); System.out.print('\t'); + System.out.print(ndx.getEntryCount()); + System.out.print('\t'); System.out.print(stats.nodeBytes); System.out.print('\t'); System.out.print(stats.leafBytes); Modified: branches/QUADS_QUERY_BRANCH/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/build.xml 2011-06-15 16:25:39 UTC (rev 4710) +++ branches/QUADS_QUERY_BRANCH/build.xml 2011-06-15 19:35:12 UTC (rev 4711) @@ -170,6 +170,8 @@ <target name="jar" depends="compile" description="Generates the jar (see also bundleJar)."> <jar destfile="${build.dir}/${version}.jar"> <fileset dir="${build.dir}/classes" excludes="test/**" /> + <!-- Copy WAR resources for the embedded NanoSparqlServer --> + <fileset dir="." includes="bigdata-war/src/**"/> <manifest> <!--<attribute name="Main-Class" value="com/bigdata/rdf/rio/TestRioIntegration"/>--> </manifest> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
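
With the new jar fileset (dir=".", includes="bigdata-war/src/**"), the web-app files travel inside the bigdata jar with entry names prefixed by "bigdata-war/src/", so an embedded NanoSparqlServer deployment can resolve them from the classpath instead of a separate directory layout. A quick sanity check using only the standard ClassLoader resource lookup; the class below is illustrative and not part of the commit:

    import java.net.URL;

    public class CheckBundledWebApp {

        public static void main(final String[] args) {
            // Pass any entry that exists under bigdata-war/src/** in the source
            // tree, using the same "bigdata-war/src/..." prefix the fileset produces.
            final String entry = args[0];
            final URL url = CheckBundledWebApp.class.getClassLoader().getResource(entry);
            System.out.println(url == null
                    ? entry + " is NOT visible on the classpath"
                    : "found bundled resource: " + url);
        }
    }

Running it with the built jar on the classpath confirms whether the bundling step in build.xml did its job before any server code is involved.
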
From: <mrp...@us...> - 2011-06-17 19:58:47
Revision: 4730 http://bigdata.svn.sourceforge.net/bigdata/?rev=4730&view=rev Author: mrpersonick Date: 2011-06-17 19:58:40 +0000 (Fri, 17 Jun 2011) Log Message: ----------- changed the materialization pipeline to run constraints prior to materialization in certain cases Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/INeedsMaterialization.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBOp.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/CompareBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -41,6 +41,7 @@ import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.IVUtility; +import com.bigdata.rdf.internal.NotMaterializedException; import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.model.BigdataValue; @@ -152,6 +153,9 @@ final BigdataValue val1 = left.getValue(); final BigdataValue val2 = right.getValue(); + if (val1 == null || val2 == null) + throw new NotMaterializedException(); + try { // use the Sesame implementation directly @@ -198,6 +202,18 @@ } + /** + * The CompareBOp can work with non-materialized terms in the case of + * inline numerical compare operations. It is only when the bop encounters + * non-inlined numerics or needs to compare strings that it needs + * materialized terms. + */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.SOMETIMES; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { @@ -206,13 +222,12 @@ terms = new LinkedHashSet<IVariable<IV>>(); - final IValueExpression<? extends IV> left = get(0); - if (left instanceof IVariable) - terms.add((IVariable<IV>) left); + for (BOp bop : args()) { + + if (bop instanceof IVariable) + terms.add((IVariable<IV>) bop); - final IValueExpression<? 
extends IV> right = get(1); - if (right instanceof IVariable) - terms.add((IVariable<IV>) right); + } } Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/DatatypeBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -41,6 +41,7 @@ import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.XSD; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.model.BigdataURI; import com.bigdata.rdf.model.BigdataValue; @@ -181,6 +182,15 @@ } + /** + * The DatatypeBOp can evaluate against unmaterialized inline numerics. + */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.SOMETIMES; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/EBVBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -37,7 +37,9 @@ import com.bigdata.bop.IVariable; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.NotMaterializedException; import com.bigdata.rdf.internal.XSDBooleanIV; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataValue; /** @@ -127,6 +129,9 @@ final BigdataValue val = iv.getValue(); + if (val == null) + throw new NotMaterializedException(); + try { return QueryEvaluationUtil.getEffectiveBooleanValue(val); @@ -139,6 +144,16 @@ } + /** + * The EBVBOp only needs materialization if its internal value expression + * does not evaluate to an XSDBooleanIV. + */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.SOMETIMES; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FuncBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -45,6 +45,7 @@ import com.bigdata.rdf.internal.NotMaterializedException; import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.internal.VTE; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.model.BigdataValueFactory; import com.bigdata.rdf.model.BigdataValueFactoryImpl; @@ -171,6 +172,15 @@ } + /** + * This bop can only work with materialized terms. 
+ */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.ALWAYS; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/INeedsMaterialization.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/INeedsMaterialization.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/INeedsMaterialization.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -37,6 +37,38 @@ */ public interface INeedsMaterialization { + public enum Requirement { + + /** + * Always needs materialization. + */ + ALWAYS, + + /** + * Only needs materialization if inline evaluation fails. + */ + SOMETIMES, + + /** + * Never needs materialization. + */ + NEVER + }; + + /** + * Does the bop always need materialized variables, or can it sometimes + * operate on inline terms without materialization? If sometimes, we'll + * run it before the materialization pipeline steps in an effort to avoid + * unnecessary materialization overhead. If it fails to evaluate for a + * particular solution, then it will be run again after the materialization + * steps for that solution. + */ + Requirement getRequirement(); + + /** + * Provide a set of terms that need to be materialized before the bop can + * evaluate. + */ Set<IVariable<IV>> getTermsToMaterialize(); - + } Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -41,6 +41,7 @@ import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.XSD; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.model.BigdataURI; import com.bigdata.rdf.model.BigdataValue; @@ -154,6 +155,15 @@ } + /** + * This bop can only work with materialized terms. + */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.ALWAYS; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/LangMatchesBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -39,6 +39,7 @@ import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataValue; /** @@ -138,6 +139,15 @@ } + /** + * This bop can only work with materialized terms. 
+ */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.ALWAYS; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/MathBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -48,6 +48,8 @@ import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.IVUtility; +import com.bigdata.rdf.internal.NotMaterializedException; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataValue; /** @@ -177,6 +179,9 @@ final BigdataValue val2 = right.getValue(); + if (val1 == null || val2 == null) + throw new NotMaterializedException(); + if (!(val1 instanceof Literal) || !(val2 instanceof Literal)) { throw new SparqlTypeErrorException(); } @@ -273,6 +278,16 @@ } + /** + * The MathBOp can work on inline numerics. It is only when the operands + * evaluate to non-inline numerics that this bop needs materialization. + */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.SOMETIMES; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/RegexBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -40,6 +40,7 @@ import com.bigdata.bop.IVariable; import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; public class RegexBOp extends XSDBooleanIVValueExpression implements INeedsMaterialization { @@ -94,8 +95,15 @@ super(op); } + /** + * This bop can only work with materialized terms. 
+ */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.ALWAYS; + + } - private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBOp.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/StrBOp.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -38,6 +38,7 @@ import com.bigdata.rdf.error.SparqlTypeErrorException; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.WrappedIV; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.model.BigdataLiteral; import com.bigdata.rdf.model.BigdataValueFactory; import com.bigdata.rdf.model.BigdataValueFactoryImpl; @@ -132,6 +133,15 @@ } + /** + * This bop can only work with materialized terms. + */ + public Requirement getRequirement() { + + return INeedsMaterialization.Requirement.ALWAYS; + + } + private volatile transient Set<IVariable<IV>> terms; public Set<IVariable<IV>> getTermsToMaterialize() { Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -33,9 +33,11 @@ import java.util.Enumeration; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; +import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -54,6 +56,7 @@ import com.bigdata.bop.IConstant; import com.bigdata.bop.IConstraint; import com.bigdata.bop.IPredicate; +import com.bigdata.bop.IValueExpression; import com.bigdata.bop.IVariable; import com.bigdata.bop.IVariableOrConstant; import com.bigdata.bop.NV; @@ -78,12 +81,16 @@ import com.bigdata.bop.rdf.join.InlineMaterializeOp; import com.bigdata.bop.solutions.SliceOp; import com.bigdata.rdf.internal.IV; +import com.bigdata.rdf.internal.NotMaterializedException; import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.constraints.INeedsMaterialization; +import com.bigdata.rdf.internal.constraints.INeedsMaterialization.Requirement; import com.bigdata.rdf.internal.constraints.IsInlineBOp; import com.bigdata.rdf.internal.constraints.IsMaterializedBOp; +import com.bigdata.rdf.internal.constraints.NeedsMaterializationBOp; import com.bigdata.rdf.internal.constraints.SPARQLConstraint; +import com.bigdata.rdf.internal.constraints.TryBeforeMaterializationConstraint; import com.bigdata.rdf.lexicon.LexPredicate; import com.bigdata.rdf.spo.DefaultGraphSolutionExpander; import com.bigdata.rdf.spo.ISPO; @@ -685,11 +692,9 @@ * add a materialization step in between the join and the constraint * evaluation. 
*/ - final Collection<IConstraint> conditionals = - new LinkedList<IConstraint>(); + final Map<IConstraint, Set<IVariable<IV>>> needsMaterialization = + new LinkedHashMap<IConstraint, Set<IVariable<IV>>>(); - final Set<IVariable<IV>> terms = new LinkedHashSet<IVariable<IV>>(); - if (constraints != null && !constraints.isEmpty()) { // // decorate the predicate with any constraints. // pred = (Predicate<?>) pred.setProperty( @@ -700,6 +705,9 @@ final Collection<IConstraint> tmp = new LinkedList<IConstraint>(); tmp.addAll(constraints); + final Collection<IConstraint> tryBeforeMaterialization = + new LinkedList<IConstraint>(); + final Iterator<IConstraint> it = tmp.iterator(); while (it.hasNext()) { @@ -708,16 +716,37 @@ // if this constraint needs materialized variables, remove it // from the join and run it as a ConditionalRoutingOp later - if (gatherTermsToMaterialize(c, terms)) { + + final Set<IVariable<IV>> terms = + new LinkedHashSet<IVariable<IV>>(); + + final Requirement req = gatherTermsToMaterialize(c, terms); + + if (req != Requirement.NEVER) { it.remove(); + + if (req == Requirement.SOMETIMES) { + + tryBeforeMaterialization.add(c); + + } - conditionals.add(c); + needsMaterialization.put(c, terms); } } + for (IConstraint c : tryBeforeMaterialization) { + + // need to make a clone so that BOpUtility doesn't complain + c = (IConstraint) c.clone(); + + tmp.add(new TryBeforeMaterializationConstraint(c)); + + } + // add constraints to the join for that predicate. anns.add(new NV( PipelineJoin.Annotations.CONSTRAINTS, @@ -801,19 +830,34 @@ } - if (conditionals.size() > 0) { + if (needsMaterialization.size() > 0) { + + final Set<IVariable<IV>> alreadyMaterialized = + new LinkedHashSet<IVariable<IV>>(); - final int right = idFactory.incrementAndGet(); - - left = addMaterializationSteps(db, queryEngine, left, right, - terms, idFactory, queryHints); - - boolean first = true; - - for (IConstraint c : conditionals) { + for (Map.Entry<IConstraint, Set<IVariable<IV>>> e : + needsMaterialization.entrySet()) { + + final IConstraint c = e.getKey(); + + final Set<IVariable<IV>> terms = e.getValue(); + + // remove any terms already materialized + terms.removeAll(alreadyMaterialized); + + // add any new terms to the list of already materialized + alreadyMaterialized.addAll(terms); + + final int condId = idFactory.incrementAndGet(); - final int condId = first ? right : idFactory.incrementAndGet(); + // we might have already materialized everything we need + if (terms.size() > 0) { + + left = addMaterializationSteps(db, queryEngine, left, + condId, c, terms, idFactory, queryHints); + } + left = Rule2BOpUtility.applyQueryHints( new ConditionalRoutingOp(new BOp[]{left}, NV.asMap(new NV[]{// @@ -821,8 +865,6 @@ new NV(ConditionalRoutingOp.Annotations.CONDITION, c), })), queryHints); - first = false; - } } @@ -835,9 +877,10 @@ * Use the {@link INeedsMaterialization} interface to find and collect * variables that need to be materialized for this constraint. */ - public static boolean requiresMaterialization(final IConstraint c) { + public static boolean requiresMaterialization( + final IConstraint c) { - return gatherTermsToMaterialize(c, new HashSet<IVariable<IV>>()); + return gatherTermsToMaterialize(c, new HashSet<IVariable<IV>>()) != Requirement.NEVER; } @@ -845,10 +888,11 @@ * Use the {@link INeedsMaterialization} interface to find and collect * variables that need to be materialized for this constraint. 
*/ - public static boolean gatherTermsToMaterialize(final IConstraint c, - final Set<IVariable<IV>> terms) { + public static INeedsMaterialization.Requirement gatherTermsToMaterialize( + final IConstraint c, final Set<IVariable<IV>> terms) { boolean materialize = false; + boolean always = false; final Iterator<BOp> it = BOpUtility.preOrderIterator(c); @@ -862,22 +906,33 @@ if (bop instanceof INeedsMaterialization) { - final Set<IVariable<IV>> t = - ((INeedsMaterialization) bop).getTermsToMaterialize(); + final INeedsMaterialization bop2 = (INeedsMaterialization) bop; + final Set<IVariable<IV>> t = bop2.getTermsToMaterialize(); + if (t.size() > 0) { terms.addAll(t); materialize = true; + // if any bops have terms that always needs materialization + // then mark the whole constraint as such + if (bop2.getRequirement() == Requirement.ALWAYS) { + + always = true; + + } + } } } - return materialize; + return materialize ? + (always ? Requirement.ALWAYS : Requirement.SOMETIMES) : + Requirement.NEVER; } @@ -909,6 +964,10 @@ * @param right * the right (downstream) operator that immediately follows the * materialization steps + * @param c + * the constraint to run on the IsMaterialized op to see if the + * materialization pipeline can be bypassed (bypass if true and + * no {@link NotMaterializedException} is thrown). * @param varsToMaterialize * the terms to materialize * @param idFactory @@ -921,9 +980,31 @@ public static PipelineOp addMaterializationSteps( final AbstractTripleStore db, final QueryEngine queryEngine, PipelineOp left, final int right, + final IConstraint c, final Collection<IVariable<IV>> varsToMaterialize, final AtomicInteger idFactory, final Properties queryHints) { + /* + * If the constraint "c" can run without a NotMaterializedException then + * bypass the pipeline + */ + { + + final IValueExpression ve = (IValueExpression) c.get(0); + + final IConstraint c2 = + new SPARQLConstraint(new NeedsMaterializationBOp(ve)); + + left = Rule2BOpUtility.applyQueryHints( + new ConditionalRoutingOp(new BOp[]{left}, + NV.asMap(new NV[]{// + new NV(BOp.Annotations.BOP_ID, idFactory.incrementAndGet()), + new NV(ConditionalRoutingOp.Annotations.CONDITION, c2), + new NV(PipelineOp.Annotations.ALT_SINK_REF, right), + })), queryHints); + + } + final Iterator<IVariable<IV>> it = varsToMaterialize.iterator(); int firstId = idFactory.incrementAndGet(); Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-06-17 19:57:12 UTC (rev 4729) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/sop/SOp2BOpUtility.java 2011-06-17 19:58:40 UTC (rev 4730) @@ -363,50 +363,11 @@ if (postConditionals.size() > 0) { - final Set<IVariable<IV>> toMaterialize = - new LinkedHashSet<IVariable<IV>>(); + left = addConditionals( + postConditionals, left, + idFactory, db, queryEngine, queryHints + ); - for (IConstraint c : postConditionals) { - Rule2BOpUtility.gatherTermsToMaterialize(c, toMaterialize); - } - - final int right = idFactory.incrementAndGet(); - - if (toMaterialize.size() > 0) { - - left = Rule2BOpUtility.addMaterializationSteps( - db, queryEngine, left, right, - toMaterialize, idFactory, queryHints); - - if (log.isDebugEnabled()) { - log.debug("added materialization steps:\n" + left); - } - - } - - boolean first = true; - - for (IConstraint c : postConditionals) { - 
- final int condId = first ? right : idFactory.incrementAndGet(); - - final PipelineOp condOp = - new ConditionalRoutingOp(new BOp[]{left}, - NV.asMap(new NV[]{// - new NV(BOp.Annotations.BOP_ID,condId), - new NV(ConditionalRoutingOp.Annotations.CONDITION, c), - })); - - left = condOp; - - if (log.isDebugEnabled()) { - log.debug("adding post-conditional routing op: " + condOp); - } - - first = false; - - } - } if (!left.getEvaluationContext() @@ -632,50 +593,11 @@ if (preConditionals != null) { // @todo lift into CONDITION on SubqueryOp - final Set<IVariable<IV>> toMaterialize = - new LinkedHashSet<IVariable<IV>>(); - - for (IConstraint c : preConditionals) { - Rule2BOpUtility.gatherTermsToMaterialize(c, toMaterialize); - } - - final int right = idFactory.incrementAndGet(); - - if (toMaterialize.size() > 0) { + left = addConditionals( + preConditionals, left, + idFactory, db, queryEngine, queryHints + ); - left = Rule2BOpUtility.addMaterializationSteps( - db, queryEngine, left, right, - toMaterialize, idFactory, queryHints); - - if (log.isDebugEnabled()) { - log.debug("added materialization steps:\n" + left); - } - - } - - boolean first = true; - - for (IConstraint c : preConditionals) { - - final int condId = first ? right : idFactory.incrementAndGet(); - - final PipelineOp condOp = Rule2BOpUtility.applyQueryHints( - new ConditionalRoutingOp(new BOp[]{left}, - NV.asMap(new NV[]{// - new NV(BOp.Annotations.BOP_ID,condId), - new NV(ConditionalRoutingOp.Annotations.CONDITION, c), - })), queryHints); - - left = condOp; - - if (log.isDebugEnabled()) { - log.debug("adding conditional routing op: " + condOp); - } - - first = false; - - } - } if (hashJoins.size() > 0) { @@ -736,7 +658,69 @@ return pred.getAccessPathExpander() instanceof FreeTextSearchExpander; } + + protected static final PipelineOp addConditionals( + final Collection<IConstraint> constraints, + final PipelineOp op, + final AtomicInteger idFactory, final AbstractTripleStore db, + final QueryEngine queryEngine, final Properties queryHints) { + PipelineOp left = op; + + final Map<IConstraint, Set<IVariable<IV>>> toMaterialize = + new LinkedHashMap<IConstraint, Set<IVariable<IV>>>(); + + for (IConstraint c : constraints) { + + final Set<IVariable<IV>> terms = + new LinkedHashSet<IVariable<IV>>(); + + Rule2BOpUtility.gatherTermsToMaterialize(c, terms); + + toMaterialize.put(c, terms); + + } + + final Set<IVariable<IV>> alreadyMaterialized = + new LinkedHashSet<IVariable<IV>>(); + + for (Map.Entry<IConstraint, Set<IVariable<IV>>> e : + toMaterialize.entrySet()) { + + final IConstraint c = e.getKey(); + + final Set<IVariable<IV>> terms = e.getValue(); + + // remove any terms already materialized + terms.removeAll(alreadyMaterialized); + + // add any new terms to the list of already materialized + alreadyMaterialized.addAll(terms); + + final int condId = idFactory.incrementAndGet(); + + // we might have already materialized everything we need + if (terms.size() > 0) { + + left = Rule2BOpUtility.addMaterializationSteps( + db, queryEngine, left, condId, c, + terms, idFactory, queryHints); + + } + + left = Rule2BOpUtility.applyQueryHints( + new ConditionalRoutingOp(new BOp[]{left}, + NV.asMap(new NV[]{// + new NV(BOp.Annotations.BOP_ID, condId), + new NV(ConditionalRoutingOp.Annotations.CONDITION, c), + })), queryHints); + + } + + return left; + + } + /** * Used by hashJoins. Temporary measure. 
Have to do this because normal * rule2BOp would attach all the constraints to the last tail, which would This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
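For readers tracing the materialization change set above, the shape a constraint operator takes after this commit is sketched below. This is an illustration only: the class name, the accept() body and the body of getTermsToMaterialize() are assumptions (the hunks above show only the getRequirement() additions and the cached terms field), while the base class, the interface methods and the Requirement values come from the committed code.

package com.bigdata.rdf.internal.constraints;

import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import com.bigdata.bop.BOp;
import com.bigdata.bop.IBindingSet;
import com.bigdata.bop.IVariable;
import com.bigdata.rdf.internal.IV;

/**
 * Illustrative filter (not part of the commit) following the pattern of
 * StrBOp, LangBOp and RegexBOp above.
 */
public class ExampleFilterBOp extends XSDBooleanIVValueExpression
        implements INeedsMaterialization {

    private static final long serialVersionUID = 1L;

    public ExampleFilterBOp(final BOp[] args, final Map<String, Object> anns) {
        super(args, anns);
    }

    public ExampleFilterBOp(final ExampleFilterBOp op) {
        super(op);
    }

    public boolean accept(final IBindingSet bs) {
        // Hypothetical test; a real operator would inspect the materialized
        // BigdataValue and throw NotMaterializedException if it were null.
        return true;
    }

    /**
     * This example can only work with materialized terms. Operators that can
     * sometimes evaluate against inline IVs report SOMETIMES instead
     * (cf. MathBOp, EBVBOp, DatatypeBOp above).
     */
    public Requirement getRequirement() {
        return INeedsMaterialization.Requirement.ALWAYS;
    }

    private volatile transient Set<IVariable<IV>> terms;

    /**
     * Hypothetical body: collect the variable arguments whose bound terms
     * must be materialized before accept() can run. The committed operators
     * cache the set in the same volatile transient field.
     */
    public Set<IVariable<IV>> getTermsToMaterialize() {
        if (terms == null) {
            final Set<IVariable<IV>> tmp = new LinkedHashSet<IVariable<IV>>();
            for (int i = 0; i < arity(); i++) {
                final BOp arg = get(i);
                if (arg instanceof IVariable)
                    tmp.add((IVariable<IV>) arg);
            }
            terms = tmp;
        }
        return terms;
    }

}

At query generation time, Rule2BOpUtility.gatherTermsToMaterialize() walks each constraint, unions these term sets, and classifies the whole constraint as ALWAYS if any operator reports ALWAYS, SOMETIMES if any materialization may be needed at all, and NEVER otherwise; SOMETIMES constraints are additionally cloned, wrapped in a TryBeforeMaterializationConstraint and left attached to the join so they get a chance to run before the materialization pipeline.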
From: <mrp...@us...> - 2011-06-17 23:34:42
|
Revision: 4734 http://bigdata.svn.sourceforge.net/bigdata/?rev=4734&view=rev Author: mrpersonick Date: 2011-06-17 23:34:35 +0000 (Fri, 17 Jun 2011) Log Message: ----------- more optimizations for CompareBOp, SameTermBOp Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FalseBOp.java branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/TrueBOp.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SparqlTypeErrorBOp.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FalseBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FalseBOp.java 2011-06-17 22:57:30 UTC (rev 4733) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/FalseBOp.java 2011-06-17 23:34:35 UTC (rev 4734) @@ -28,6 +28,8 @@ import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.rdf.internal.IV; /** * Always evaluates to false. @@ -47,6 +49,12 @@ } + public FalseBOp(final IValueExpression<? extends IV> x) { + + this(new BOp[] { x }, NOANNS); + + } + /** * Required shallow copy constructor. */ Added: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SparqlTypeErrorBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SparqlTypeErrorBOp.java (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/SparqlTypeErrorBOp.java 2011-06-17 23:34:35 UTC (rev 4734) @@ -0,0 +1,81 @@ +/* + +Copyright (C) SYSTAP, LLC 2006-2011. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +*/ +package com.bigdata.rdf.internal.constraints; + +import java.util.Map; + +import com.bigdata.bop.BOp; +import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.rdf.error.SparqlTypeErrorException; +import com.bigdata.rdf.internal.IV; + +/** + * Always throws a SparqlTypeError. + */ +public class SparqlTypeErrorBOp extends XSDBooleanIVValueExpression { + + /** + * + */ + private static final long serialVersionUID = 2699085294332649839L; + + public static final SparqlTypeErrorBOp INSTANCE = new SparqlTypeErrorBOp(); + + private SparqlTypeErrorBOp() { + + this(NOARGS, NOANNS); + + } + + public SparqlTypeErrorBOp(final IValueExpression<? 
extends IV> x) { + + this(new BOp[] { x }, NOANNS); + + } + + /** + * Required shallow copy constructor. + */ + public SparqlTypeErrorBOp(final BOp[] args, final Map<String, Object> anns) { + + super(args, anns); + + } + + /** + * Required deep copy constructor. + */ + public SparqlTypeErrorBOp(final SparqlTypeErrorBOp op) { + super(op); + } + + public boolean accept(final IBindingSet bs) { + + throw new SparqlTypeErrorException(); + + } + +} Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/TrueBOp.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/TrueBOp.java 2011-06-17 22:57:30 UTC (rev 4733) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/internal/constraints/TrueBOp.java 2011-06-17 23:34:35 UTC (rev 4734) @@ -28,6 +28,8 @@ import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.rdf.internal.IV; /** * Always evaluates to true. @@ -47,6 +49,12 @@ } + public TrueBOp(final IValueExpression<? extends IV> x) { + + this(new BOp[] { x }, NOANNS); + + } + /** * Required shallow copy constructor. */ Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-06-17 22:57:30 UTC (rev 4733) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-06-17 23:34:35 UTC (rev 4734) @@ -122,6 +122,7 @@ import com.bigdata.rdf.internal.constraints.RegexBOp; import com.bigdata.rdf.internal.constraints.SPARQLConstraint; import com.bigdata.rdf.internal.constraints.SameTermBOp; +import com.bigdata.rdf.internal.constraints.SparqlTypeErrorBOp; import com.bigdata.rdf.internal.constraints.StrBOp; import com.bigdata.rdf.internal.constraints.TrueBOp; import com.bigdata.rdf.lexicon.LexiconRelation; @@ -1948,25 +1949,61 @@ toVE(sameTerm.getLeftArg()); final IValueExpression<? extends IV> right = toVE(sameTerm.getRightArg()); - - /* - * If a constant operand in the SameTerm op uses a value not found - * in the database, we must defer to the CompareBOp, which can perform - * value comparisons. SameTermBOp only works on IVs. - */ - + + /* + * If a constant operand in the SameTerm op uses a value not found in + * the database, we end up in one of two possible situations: + * + * 1. If the constant operand is a URI, there is no possible way for + * SameTerm to evaluate to true, unless the other operand is a + * DatatypeBOp. (This is because DatatypeBOp will stamp phony TermId IVs + * for the datatypes for inline numerics and math operations.) So if the + * other operand is not a DatatypeBOp, we can just return a FalseBOp + * that wraps the SameTermBOp that would have happened (this wrapping is + * purely informational). + * + * 2. If the constant operand is not a URI, we need to defer to the + * CompareBOp, which knows how to do value comparisons. SameTermBOp only + * works on IVs. + */ if (left instanceof Constant) { + final IV iv = ((Constant<? 
extends IV>) left).get(); + if (iv.isTermId() && iv.getTermId() == TermId.NULL) { - return new CompareBOp(left, right, CompareOp.EQ); + + if (iv.isURI() && !(right instanceof DatatypeBOp)) { + + return new FalseBOp(new SameTermBOp(left, right)); + + } else { + + return new CompareBOp(left, right, CompareOp.EQ); + + } + } + } if (right instanceof Constant) { + final IV iv = ((Constant<? extends IV>) right).get(); + if (iv.isTermId() && iv.getTermId() == TermId.NULL) { - return new CompareBOp(left, right, CompareOp.EQ); + + if (iv.isURI() && !(left instanceof DatatypeBOp)) { + + return new FalseBOp(new SameTermBOp(left, right)); + + } else { + + return new CompareBOp(left, right, CompareOp.EQ); + + } + } + } return new SameTermBOp(left, right); @@ -1977,45 +2014,114 @@ toVE(compare.getLeftArg()); final IValueExpression<? extends IV> right = toVE(compare.getRightArg()); + + if (left.equals(right)) { + if (compare.getOperator() == CompareOp.EQ) { + return TrueBOp.INSTANCE; + } else { + return FalseBOp.INSTANCE; + } + } /* - * If the term is a Constant<URI> and the op is EQ or NE then we can - * do a sameTerm optimization. The URI constant must be a real term - * in the database. + * If we are dealing with a URI constant: + * + * We can use SparqlTypeErrorBOp for any operator other than EQ, NE + * + * If it's a real term: + * + * We can use SameTermBOp + * + * If it's not a real term: + * + * The only time we actually need to evaluate this is when the other + * operand is a DatatypeBOp. All other times, we can return FalseBOp for + * EQ and TrueBOp for NE. + * */ + final CompareOp op = compare.getOperator(); - if (op == CompareOp.EQ || op == CompareOp.NE) { - - if (left instanceof Constant && !(right instanceof DatatypeBOp)) { - final IV iv = ((Constant<? extends IV>) left).get(); - if (iv.isURI() && iv.getTermId() != TermId.NULL) { - return new SameTermBOp(left, right, op); - } - } - - if (right instanceof Constant && !(left instanceof DatatypeBOp)) { - final IV iv = ((Constant<? extends IV>) right).get(); - if (iv.isURI() && iv.getTermId() != TermId.NULL) { - return new SameTermBOp(left, right, op); - } - } - + + if (left instanceof Constant) { + + final IV iv = ((Constant<? extends IV>) left).get(); + + if (iv.isURI()) { + + if (!(op == CompareOp.EQ || op == CompareOp.NE)) { + + return new SparqlTypeErrorBOp(new CompareBOp(left, right, op)); + + } + + if (iv.getTermId() != TermId.NULL) { + + return new SameTermBOp(left, right, op); + + } else { + + if (!(right instanceof DatatypeBOp)) { + + if (op == CompareOp.EQ) { + + return new FalseBOp(new CompareBOp(left, right, op)); + + } else { + + return new TrueBOp(new CompareBOp(left, right, op)); + + } + + + } + + } + + } + } - if (log.isDebugEnabled()) { - log.debug(left == right); - log.debug(left.equals(right)); - } - - if (left.equals(right)) { - if (compare.getOperator() == CompareOp.EQ) { - return TrueBOp.INSTANCE; - } else { - return FalseBOp.INSTANCE; + if (right instanceof Constant) { + + final IV iv = ((Constant<? 
extends IV>) right).get(); + + if (iv.isURI()) { + + if (!(op == CompareOp.EQ || op == CompareOp.NE)) { + + return new SparqlTypeErrorBOp(new CompareBOp(left, right, op)); + + } + + if (iv.getTermId() != TermId.NULL) { + + return new SameTermBOp(left, right, op); + + } else { + + if (!(left instanceof DatatypeBOp)) { + + if (op == CompareOp.EQ) { + + return new FalseBOp(new CompareBOp(left, right, op)); + + } else { + + return new TrueBOp(new CompareBOp(left, right, op)); + + } + + + } + + } + } + } - return new CompareBOp(left, right, compare.getOperator()); + return new CompareBOp(left, right, op); + } private IValueExpression<? extends IV> toVE(final Bound bound) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
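To make the new constant folding in toVE(Compare) easier to follow, the branch for a left operand that is a Constant holding a URI is condensed below into a single hypothetical helper. The method name and its extraction into a helper are illustrative; each decision mirrors the hunk above, and the mirror-image branch applies when the right operand is the constant.

// Hypothetical helper (not in the commit): the r4734 folding for
// "URI-constant <op> expr" in BigdataEvaluationStrategyImpl3.toVE(Compare).
private IValueExpression<? extends IV> foldLeftUriConstant(
        final IValueExpression<? extends IV> left,
        final IValueExpression<? extends IV> right,
        final CompareOp op) {

    final IV iv = ((Constant<? extends IV>) left).get();

    // URIs only admit EQ / NE; any other comparison is a SPARQL type error.
    if (!(op == CompareOp.EQ || op == CompareOp.NE))
        return new SparqlTypeErrorBOp(new CompareBOp(left, right, op));

    // Known term: EQ / NE against a URI reduces to sameTerm semantics.
    if (iv.getTermId() != TermId.NULL)
        return new SameTermBOp(left, right, op);

    // Unknown term: unless the other operand is a DatatypeBOp (which stamps
    // phony TermIds for inline numerics), the result is statically known.
    if (!(right instanceof DatatypeBOp))
        return op == CompareOp.EQ
                ? new FalseBOp(new CompareBOp(left, right, op))
                : new TrueBOp(new CompareBOp(left, right, op));

    // Otherwise fall back to normal runtime evaluation.
    return new CompareBOp(left, right, op);
}

Per the comment in the SameTerm hunk, the FalseBOp / TrueBOp wrappers take the replaced expression purely for information, and SparqlTypeErrorBOp.accept() unconditionally throws SparqlTypeErrorException.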
From: <tho...@us...> - 2011-06-22 13:17:35
|
Revision: 4769 http://bigdata.svn.sourceforge.net/bigdata/?rev=4769&view=rev Author: thompsonbry Date: 2011-06-22 13:17:28 +0000 (Wed, 22 Jun 2011) Log Message: ----------- Updated the zookeeper bundled dependency to 3.3.3 from 3.2.1. The specific version is now parameterized through build.properties. Build.xml was modified to support that version parameterization. Updated the tuprolog dependency from 2.1.1 to 2.2 and bundled the 2p jar rather than the tuprolog jar. The 2p jar includes the CLI and GUI interfaces and is more useful for interactive work with prolog. The tuprolog jar contains the core of the prolog implementation is might be all that it required for some applications, but it is not really enough for application development. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/.classpath branches/QUADS_QUERY_BRANCH/build.properties branches/QUADS_QUERY_BRANCH/build.xml Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata/lib/tuprolog/2p-v2.2.jar branches/QUADS_QUERY_BRANCH/bigdata-jini/lib/apache/zookeeper-3.3.3.jar Removed Paths: ------------- branches/QUADS_QUERY_BRANCH/bigdata/lib/tuprolog/tuprolog-v2.1.1.jar branches/QUADS_QUERY_BRANCH/bigdata-jini/lib/apache/zookeeper-3.2.1.jar Modified: branches/QUADS_QUERY_BRANCH/.classpath =================================================================== --- branches/QUADS_QUERY_BRANCH/.classpath 2011-06-22 12:21:32 UTC (rev 4768) +++ branches/QUADS_QUERY_BRANCH/.classpath 2011-06-22 13:17:28 UTC (rev 4769) @@ -18,11 +18,10 @@ <classpathentry kind="src" path="ctc-striterators/src/java"/> <classpathentry kind="src" path="ctc-striterators/src/test"/> <classpathentry kind="src" path="bigdata-perf/bsbm/src/test"/> - <classpathentry kind="lib" path="bigdata-jini/lib/apache/zookeeper-3.2.1.jar"/> + <classpathentry kind="src" path="bigdata-sails/src/prolog"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/dsi-utils-1.0.6-020610.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/lgpl-utils-1.0.6-020610.jar"/> <classpathentry exported="true" kind="lib" path="bigdata-rdf/lib/nxparser-6-22-2010.jar"/> - <classpathentry kind="lib" path="bigdata/lib/tuprolog/tuprolog-v2.1.1.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-continuation-7.2.2.v20101205.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-http-7.2.2.v20101205.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-io-7.2.2.v20101205.jar"/> @@ -33,6 +32,7 @@ <classpathentry kind="lib" path="bigdata/lib/jetty/jetty-security-7.2.2.v20101205.jar"/> <classpathentry kind="lib" path="bigdata/lib/jetty/jetty-webapp-7.2.2.v20101205.jar"/> <classpathentry kind="lib" path="bigdata/lib/jetty/jetty-xml-7.2.2.v20101205.jar"/> + <classpathentry kind="lib" path="bigdata/lib/tuprolog/2p-v2.2.jar"/> <classpathentry kind="src" path="lgpl-utils/src/java"/> <classpathentry kind="src" path="lgpl-utils/src/test"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/icu/icu4j-3_6.jar"/> @@ -71,5 +71,6 @@ <classpathentry kind="lib" path="bigdata-sails/lib/sesame-sparql-testsuite-2.3.0.jar"/> <classpathentry kind="lib" path="bigdata-sails/lib/sesame-store-testsuite-2.3.0.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/high-scale-lib-v1.1.2.jar"/> + <classpathentry exported="true" kind="lib" path="bigdata-jini/lib/apache/zookeeper-3.3.3.jar"/> <classpathentry kind="output" path="bin"/> </classpath> Added: 
branches/QUADS_QUERY_BRANCH/bigdata/lib/tuprolog/2p-v2.2.jar =================================================================== (Binary files differ) Property changes on: branches/QUADS_QUERY_BRANCH/bigdata/lib/tuprolog/2p-v2.2.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Deleted: branches/QUADS_QUERY_BRANCH/bigdata/lib/tuprolog/tuprolog-v2.1.1.jar =================================================================== (Binary files differ) Deleted: branches/QUADS_QUERY_BRANCH/bigdata-jini/lib/apache/zookeeper-3.2.1.jar =================================================================== (Binary files differ) Added: branches/QUADS_QUERY_BRANCH/bigdata-jini/lib/apache/zookeeper-3.3.3.jar =================================================================== (Binary files differ) Property changes on: branches/QUADS_QUERY_BRANCH/bigdata-jini/lib/apache/zookeeper-3.3.3.jar ___________________________________________________________________ Added: svn:mime-type + application/octet-stream Modified: branches/QUADS_QUERY_BRANCH/build.properties =================================================================== --- branches/QUADS_QUERY_BRANCH/build.properties 2011-06-22 12:21:32 UTC (rev 4768) +++ branches/QUADS_QUERY_BRANCH/build.properties 2011-06-22 13:17:28 UTC (rev 4769) @@ -28,6 +28,9 @@ #javac.source=1.6 javac.encoding=Cp1252 +# The zookeeper version. +zookeeper.version=3.3.3 + # Set to false to NOT start services (zookeeper, lookup server, class server, etc). # When false, tests which depend on those services will not run. (This can also be # set by CI if you leave if undefined here.) For example: @@ -386,7 +389,8 @@ # CI properties. These must agree with the actual installation directory and zoo.cfg # file for the zookeeper instance used to run CI. 
-test.zookeeper.installDir=/Users/bryan/zookeeper-3.2.1 +test.zookeeper.installDir=/Users/bryan/zookeeper-${zookeeper.version} #test.zookeeper.installDir=/usr/java/zookeeper-3.2.1 +#test.zookeeper.installDir=/usr/local/zookeeper-${zookeeper.version} test.zookeeper.tickTime=2000 test.zookeeper.clientPort=2081 Modified: branches/QUADS_QUERY_BRANCH/build.xml =================================================================== --- branches/QUADS_QUERY_BRANCH/build.xml 2011-06-22 12:21:32 UTC (rev 4768) +++ branches/QUADS_QUERY_BRANCH/build.xml 2011-06-22 13:17:28 UTC (rev 4769) @@ -211,7 +211,7 @@ <bndwrap jars="${build.dir}/lib/lgpl-utils-1.0.6-020610.jar" output="${build.dir}/bundles/lgpl-utils-1.0.6-020610.jar" definitions="${basedir}/osgi/" /> <bndwrap jars="${build.dir}/lib/high-scale-lib-v1.1.2.jar" output="${build.dir}/bundles/high-scale-lib-v1.1.2.jar" definitions="${basedir}/osgi/" /> <bndwrap jars="${build.dir}/lib/openrdf-sesame-2.3.0-onejar.jar" output="${build.dir}/bundles/openrdf-sesame-2.3.0.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/apache/zookeeper-3.2.1.jar" output="${build.dir}/bundles/zookeeper-3.2.1.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/apache/zookeeper-${zookeeper.version}.jar" output="${build.dir}/bundles/zookeeper-${zookeeper.version}.jar" definitions="${basedir}/osgi/" /> <bndwrap jars="${build.dir}/lib/nxparser-6-22-2010.jar" output="${build.dir}/bundles/nxparser-2010.6.22.jar" definitions="${basedir}/osgi/" /> </target> @@ -911,7 +911,7 @@ tofile="${dist.lib}/nxparser.jar" /> <!-- Zookeeper library --> - <copy file="${bigdata-zookeeper.lib}/zookeeper-3.2.1.jar" + <copy file="${bigdata-zookeeper.lib}/zookeeper-${zookeeper.version}.jar" tofile="${dist.lib}/zookeeper.jar" /> <!-- Jini library --> @@ -1753,20 +1753,26 @@ <!-- so I have backed it out for now. --> <target name="startZookeeper"> <echo message="test.zookeeper.installDir=${test.zookeeper.installDir}"/> - <echo>bin/zkServer.sh start + <echo>bin/zkServer.(sh|cmd) start </echo> - <exec executable="bin/zkServer.sh" dir="${test.zookeeper.installDir}" logerror="true"> + <exec executable="bin/zkServer.sh" dir="${test.zookeeper.installDir}" logerror="true" osfamily="unix"> <arg value="start"/> </exec> + <exec executable="bin/zkServer.cmd" dir="${test.zookeeper.installDir}" logerror="true" osfamily="windows"> + <arg value="start"/> + </exec> </target> <target name="stopZookeeper"> <echo message="test.zookeeper.installDir=${test.zookeeper.installDir}"/> - <echo>bin/zkServer.sh stop + <echo>bin/zkServer.(sh|cmd) stop </echo> - <exec executable="bin/zkServer.sh" dir="${test.zookeeper.installDir}" logerror="true"> + <exec executable="bin/zkServer.sh" dir="${test.zookeeper.installDir}" logerror="true" osfamily="unix"> <arg value="stop"/> </exec> + <exec executable="bin/zkServer.cmd" dir="${test.zookeeper.installDir}" logerror="true" osfamily="windows"> + <arg value="stop"/> + </exec> </target> <!-- runs all junit tests --> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
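The start/stop targets above select bin/zkServer.sh via <exec osfamily="unix"> and bin/zkServer.cmd via <exec osfamily="windows">, with the ZooKeeper version and install directory parameterized through build.properties. Outside of Ant, the same platform switch can be written in a few lines of plain Java; the sketch below is illustrative only (the class name and argument handling are assumptions, and the build itself relies on Ant's osfamily attribute, not on anything like this):

import java.io.File;
import java.io.IOException;

public class ZkLauncher {

    /**
     * Illustrative usage: java ZkLauncher /path/to/zookeeper-3.3.3 start|stop
     * The install directory mirrors test.zookeeper.installDir in
     * build.properties.
     */
    public static void main(final String[] args) throws IOException,
            InterruptedException {

        final File installDir = new File(args[0]);
        final String action = args.length > 1 ? args[1] : "start";

        // zkServer.cmd on Windows, zkServer.sh everywhere else.
        final boolean windows = System.getProperty("os.name")
                .toLowerCase().indexOf("windows") >= 0;

        final File script = new File(installDir,
                windows ? "bin/zkServer.cmd" : "bin/zkServer.sh");

        final ProcessBuilder pb = windows
                ? new ProcessBuilder("cmd", "/c", script.getAbsolutePath(), action)
                : new ProcessBuilder(script.getAbsolutePath(), action);

        // Run from the install directory, as the Ant targets do.
        pb.directory(installDir);
        pb.redirectErrorStream(true);

        // zkServer prints only a few lines, so the output is left unread here.
        System.exit(pb.start().waitFor());
    }

}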
From: <tho...@us...> - 2011-06-22 18:06:44
|
Revision: 4771 http://bigdata.svn.sourceforge.net/bigdata/?rev=4771&view=rev Author: thompsonbry Date: 2011-06-22 18:06:38 +0000 (Wed, 22 Jun 2011) Log Message: ----------- Modified to use the same timestamp as the triple store view for lexicon joins unless the join is executed during a full read/write transaction, in which case we use the UNISOLATED view to ensure that writes are visible. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexPredicate.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexPredicate.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexPredicate.java 2011-06-22 15:08:24 UTC (rev 4770) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/lexicon/LexPredicate.java 2011-06-22 18:06:38 UTC (rev 4771) @@ -48,10 +48,18 @@ * * @param relationName * the namespace of the lexicon relation + * @param timestamp + * The timestamp of the view to read on. This should be the same + * as the timestamp associated with the view of the triple store + * except for a full read/write transaction. Since all writes on + * the lexicon are unisolated, a full read/write transaction must + * use the {@link ITx#UNISOLATED} view of the lexicon in order to + * ensure that any writes it performs will be visible. * @param term * the term to resolve using forward lookup (term2id) */ - public static LexPredicate forwardInstance(final String relationName, + public static LexPredicate forwardInstance(final String relationName, + final long timestamp, final IVariableOrConstant<BigdataValue> term) { return new LexPredicate( @@ -60,7 +68,7 @@ Var.var(), // iv }, new NV(Annotations.RELATION_NAME, new String[] { relationName }), - new NV(Annotations.TIMESTAMP, ITx.UNISOLATED) // + new NV(Annotations.TIMESTAMP, timestamp) // ); } @@ -71,10 +79,18 @@ * * @param relationName * the namespace of the lexicon relation + * @param timestamp + * The timestamp of the view to read on. This should be the same + * as the timestamp associated with the view of the triple store + * except for a full read/write transaction. Since all writes on + * the lexicon are unisolated, a full read/write transaction must + * use the {@link ITx#UNISOLATED} view of the lexicon in order to + * ensure that any writes it performs will be visible. 
* @param term * the term to resolve using reverse lookup (id2term) */ - public static LexPredicate reverseInstance(final String relationName, + public static LexPredicate reverseInstance(final String relationName, + final long timestamp, final IVariableOrConstant<IV> term) { return new LexPredicate( @@ -83,7 +99,7 @@ term, // iv }, new NV(Annotations.RELATION_NAME, new String[] { relationName }), - new NV(Annotations.TIMESTAMP, ITx.UNISOLATED) // + new NV(Annotations.TIMESTAMP, timestamp) // ); } Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-06-22 15:08:24 UTC (rev 4770) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/Rule2BOpUtility.java 2011-06-22 18:06:38 UTC (rev 4771) @@ -80,6 +80,8 @@ import com.bigdata.bop.rdf.join.DataSetJoin; import com.bigdata.bop.rdf.join.InlineMaterializeOp; import com.bigdata.bop.solutions.SliceOp; +import com.bigdata.journal.ITx; +import com.bigdata.journal.TimestampUtility; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.NotMaterializedException; import com.bigdata.rdf.internal.TermId; @@ -1064,8 +1066,20 @@ log.debug("adding 2nd conditional routing op: " + condOp2); } - final Predicate lexPred = LexPredicate.reverseInstance( - db.getLexiconRelation().getNamespace(), v); + final Predicate lexPred; + { + /* + * Note: Use the timestamp of the triple store view unless this + * is a read/write transaction, in which case we need to use the + * unisolated view in order to see any writes which it may have + * performed (lexicon writes are always unisolated). + */ + long timestamp = db.getTimestamp(); + if (TimestampUtility.isReadWriteTx(timestamp)) + timestamp = ITx.UNISOLATED; + lexPred = LexPredicate.reverseInstance(db.getLexiconRelation() + .getNamespace(), timestamp, v); + } if (log.isDebugEnabled()) { log.debug("lex pred: " + lexPred); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
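Condensing the two hunks above, the calling convention for lexicon predicates after this change looks as follows. The timestamp logic is copied from the Rule2BOpUtility hunk; the names lex, termVar and ivVar are illustrative and assumed to be in scope (an AbstractTripleStore db, an IVariableOrConstant<BigdataValue> and an IVariableOrConstant<IV> respectively).

// Choose the lexicon view: the same timestamp as the triple store view, except
// inside a full read/write tx, where the UNISOLATED view must be used so the
// tx can see its own (always unisolated) lexicon writes.
long timestamp = db.getTimestamp();
if (TimestampUtility.isReadWriteTx(timestamp)) {
    timestamp = ITx.UNISOLATED;
}

final LexiconRelation lex = db.getLexiconRelation();

// forward lookup (term2id): BigdataValue -> IV
final LexPredicate term2id = LexPredicate.forwardInstance(
        lex.getNamespace(), timestamp, termVar);

// reverse lookup (id2term): IV -> BigdataValue
final LexPredicate id2term = LexPredicate.reverseInstance(
        lex.getNamespace(), timestamp, ivVar);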
From: <tho...@us...> - 2011-06-24 16:12:02
|
Revision: 4792 http://bigdata.svn.sourceforge.net/bigdata/?rev=4792&view=rev Author: thompsonbry Date: 2011-06-24 16:11:56 +0000 (Fri, 24 Jun 2011) Log Message: ----------- Modified the BigdataValueReplacer to use a DummyIV rather than throwing an exception if a value bound using AbstractQuery#setBinding(name,value) is not known to the database. Removed TODO from BigdataOpenRDFBindingSetResolverator. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BigdataOpenRDFBindingSetsResolverator.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BigdataOpenRDFBindingSetsResolverator.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BigdataOpenRDFBindingSetsResolverator.java 2011-06-24 15:17:57 UTC (rev 4791) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BigdataOpenRDFBindingSetsResolverator.java 2011-06-24 16:11:56 UTC (rev 4792) @@ -203,29 +203,22 @@ final BigdataValue outVal = map.get(value); - /* - * TODO An alternative to using a DummyIV would be to drop the - * BindingSet if there are any Values in it which are not known to - * the database. - */ - if (outVal != null) { + assert outVal != null; - final Constant<?> c; - - if (outVal.getIV() == null) { + final Constant<?> c; + + if (outVal.getIV() == null) { - c = new Constant(DummyIV.INSTANCE); - - } else { - - c = new Constant(outVal.getIV()); - - } + c = new Constant(DummyIV.INSTANCE); - out.set(Var.var(name), c); - + } else { + + c = new Constant(outVal.getIV()); + } + out.set(Var.var(name), c); + } return out; Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java 2011-06-24 15:17:57 UTC (rev 4791) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataValueReplacer.java 2011-06-24 16:11:56 UTC (rev 4792) @@ -38,7 +38,6 @@ import org.openrdf.query.Binding; import org.openrdf.query.BindingSet; import org.openrdf.query.Dataset; -import org.openrdf.query.algebra.LangMatches; import org.openrdf.query.algebra.StatementPattern; import org.openrdf.query.algebra.TupleExpr; import org.openrdf.query.algebra.ValueConstant; @@ -49,6 +48,7 @@ import org.openrdf.query.impl.MapBindingSet; import org.openrdf.sail.SailException; +import com.bigdata.rdf.internal.DummyIV; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.model.BigdataValueFactory; @@ -319,6 +319,10 @@ if (bindings != null) { + /* + * Replace the bindings with one's which have their IV set. + */ + final MapBindingSet bindings2 = new MapBindingSet(); final Iterator<Binding> it = bindings.iterator(); @@ -349,21 +353,31 @@ log.debug("value: " + val + " : " + val2 + " (" + val2.getIV() + ")"); - if (val2.getIV() == null) { - +// if (val2.getIV() == null) { +// +// /* +// * Since the term identifier is NULL this value is not known +// * to the kb. +// */ +// +// if (log.isInfoEnabled()) +// log.info("Not in knowledge base: " + val2); +// +// } + + if(val2.getIV() == null) { /* - * Since the term identifier is NULL this value is - * not known to the kb. 
+ * The Value is not in the database, so assign it a mock IV. + * This IV will not match anything during query. However, we + * can not simply fail the query since an OPTIONAL or UNION + * might have solutions even though this Value is not known. */ - - if(log.isInfoEnabled()) - log.info("Not in knowledge base: " + val2); - + val2.setIV(DummyIV.INSTANCE); } - // replace the constant in the query. + // rewrite the constant in the query. bindings2.addBinding(binding.getName(), val2); - + } bindings = bindings2; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
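Stripped of the loop plumbing above, the net effect of this change is a small rewrite applied to each binding supplied via AbstractQuery#setBinding(name, value). The helper below is a hypothetical condensation, not committed code:

// Hypothetical condensation of the BigdataValueReplacer hunk above.
private void addResolvedBinding(final MapBindingSet bindings2,
        final Binding binding, final BigdataValue val2) {

    if (val2.getIV() == null) {
        /*
         * The Value is not in the database, so assign it a mock IV. The mock
         * IV will not match anything during query, but the query is not
         * failed outright because an OPTIONAL or UNION may still have
         * solutions.
         */
        val2.setIV(DummyIV.INSTANCE);
    }

    // rewrite the constant in the query bindings.
    bindings2.addBinding(binding.getName(), val2);

}

The resolverator hunk (first file above) applies the same idea on the other side: when the resolved BigdataValue has no IV, the binding is set to a Constant wrapping DummyIV.INSTANCE rather than dropping the solution.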
From: <tho...@us...> - 2011-06-24 17:43:34
|
Revision: 4794 http://bigdata.svn.sourceforge.net/bigdata/?rev=4794&view=rev Author: thompsonbry Date: 2011-06-24 17:43:27 +0000 (Fri, 24 Jun 2011) Log Message: ----------- Bumped the version number. Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/build.properties Added Paths: ----------- branches/QUADS_QUERY_BRANCH/bigdata/src/releases/RELEASE_1_0_0.txt Added: branches/QUADS_QUERY_BRANCH/bigdata/src/releases/RELEASE_1_0_0.txt =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata/src/releases/RELEASE_1_0_0.txt (rev 0) +++ branches/QUADS_QUERY_BRANCH/bigdata/src/releases/RELEASE_1_0_0.txt 2011-06-24 17:43:27 UTC (rev 4794) @@ -0,0 +1,70 @@ +This is a bigdata (R) release. This release is capable of loading 1B triples in +under one hour on a 15 node cluster. JDK 1.6 is required. + +Bigdata(R) is a horizontally scaled open source architecture for indexed data +with an emphasis on semantic web data architectures. Bigdata operates in both +a single machine mode (Journal) and a cluster mode (Federation). The Journal +provides fast scalable ACID indexed storage for very large data sets. The +federation provides fast scalable shard-wise parallel indexed storage using +dynamic sharding and shard-wise ACID updates. Both platforms support fully +concurrent readers with snapshot isolation. + +Distributed processing offers greater throughput but does not reduce query or +update latency. Choose the Journal when the anticipated scale and throughput +requirements permit. Choose the Federation when the administrative and machine +overhead associated with operating a cluster is an acceptable tradeoff to have +essentially unlimited data scaling and throughput. + +See [1,2,8] for instructions on installing bigdata(R), [4] for the javadoc, and +[3,5,6] for news, questions, and the latest developments. For more information +about SYSTAP, LLC and bigdata, see [7]. + +Starting with this release, we offer a WAR artifact [8] for easy installation of +the Journal mode database. For custom development and cluster installations we +recommend checking out the code from SVN using the tag for this release. The +code will build automatically under eclipse. You can also build the code using +the ant script. The cluster installer requires the use of the ant script. You +can checkout this release from the following URL: + +https://bigdata.svn.sourceforge.net/svnroot/bigdata/branches/BIGDATA_RELEASE_1_0_0 + +New features: + +- Single machine data storage to ~50B triples/quads (RWStore); +- Simple embedded and/or webapp deployment (NanoSparqlServer); +- 100% native SPARQL 1.0 evaluation with lots of query optimizations; + +Feature summary: + +- Triples, quads, or triples with provenance (SIDs); +- Fast RDFS+ inference and truth maintenance; +- Clustered data storage is essentially unlimited; +- Fast statement level provenance mode (SIDs). + +The road map [3] for the next releases includes: + +- High-volume analytic query and SPARQL 1.1 query, including aggregations; +- Simplified deployment, configuration, and administration for clusters; and +- High availability for the journal and the cluster. 
+ +For more information, please see the following links: + +[1] https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=Main_Page +[2] https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=GettingStarted +[3] https://sourceforge.net/apps/mediawiki/bigdata/index.php?title=Roadmap +[4] http://www.bigdata.com/bigdata/docs/api/ +[5] http://sourceforge.net/projects/bigdata/ +[6] http://www.bigdata.com/blog +[7] http://www.systap.com/bigdata.htm +[8] https://sourceforge.net/projects/bigdata/files/bigdata/ + +About bigdata: + +Bigdata\xAE is a horizontally-scaled, general purpose storage and computing fabric +for ordered data (B+Trees), designed to operate on either a single server or a +cluster of commodity hardware. Bigdata\xAE uses dynamically partitioned key-range +shards in order to remove any realistic scaling limits - in principle, bigdata\xAE +may be deployed on 10s, 100s, or even thousands of machines and new capacity may +be added incrementally without requiring the full reload of all data. The bigdata\xAE +RDF database supports RDFS and OWL Lite reasoning, high-level query (SPARQL), +and datum level provenance. Property changes on: branches/QUADS_QUERY_BRANCH/bigdata/src/releases/RELEASE_1_0_0.txt ___________________________________________________________________ Added: svn:keywords + Id Date Revision Author HeadURL Modified: branches/QUADS_QUERY_BRANCH/build.properties =================================================================== --- branches/QUADS_QUERY_BRANCH/build.properties 2011-06-24 17:29:25 UTC (rev 4793) +++ branches/QUADS_QUERY_BRANCH/build.properties 2011-06-24 17:43:27 UTC (rev 4794) @@ -45,9 +45,9 @@ # Where the releases will be written. release.dir=ant-release -# The build version (note: 0.82b -> 0.82.0) -build.ver=0.83.2 -build.ver.osgi=0.83 +# The build version (note: 0.82b -> 0.82.0); 0.83.2 is followed by 1.0.0 +build.ver=1.0.0 +build.ver.osgi=1.0 # Set true to do a snapshot build. This changes the value of ${version} to # include the date. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2011-06-29 21:34:56
|
Revision: 4819 http://bigdata.svn.sourceforge.net/bigdata/?rev=4819&view=rev Author: mrpersonick Date: 2011-06-29 21:34:49 +0000 (Wed, 29 Jun 2011) Log Message: ----------- added the ability to request the hit rank for full text search hits and sparql query bindings Modified Paths: -------------- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BD.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java Modified: branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BD.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BD.java 2011-06-29 20:01:56 UTC (rev 4818) +++ branches/QUADS_QUERY_BRANCH/bigdata-rdf/src/java/com/bigdata/rdf/store/BD.java 2011-06-29 21:34:49 UTC (rev 4819) @@ -158,6 +158,22 @@ * <p> * <pre> * + * select ?s ?rank + * where { + * ?s bd:search "scale-out RDF triplestore" . + * ?s bd:rank ?rank . + * } + * + * </pre> + */ + final URI RANK = new URIImpl(SEARCH_NAMESPACE+"rank"); + + /** + * Magic predicate used to query for free text search metadata. Use + * in conjunction with {@link #SEARCH} as follows: + * <p> + * <pre> + * * select ?s * where { * ?s bd:search "scale-out RDF triplestore" . Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-06-29 20:01:56 UTC (rev 4818) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataEvaluationStrategyImpl3.java 2011-06-29 21:34:49 UTC (rev 4819) @@ -774,6 +774,7 @@ final Value p = sp.getPredicateVar().getValue(); if (s == null && p != null && (BD.RELEVANCE.equals(p) || + BD.RANK.equals(p) || BD.MIN_RANK.equals(p) || BD.MAX_RANK.equals(p) || BD.MIN_RELEVANCE.equals(p) || @@ -1836,6 +1837,7 @@ com.bigdata.bop.Var.var(subjVar.getName()); IVariableOrConstant<IV> relevance = new Constant(DummyIV.INSTANCE); + IVariableOrConstant<IV> rank = new Constant(DummyIV.INSTANCE); Literal minRank = null; Literal maxRank = null; Literal minRelevance = null; @@ -1857,6 +1859,11 @@ throw new IllegalArgumentException("illegal metadata: " + meta); } relevance = com.bigdata.bop.Var.var(oVar.getName()); + } else if (BD.RANK.equals(pVal)) { + if (oVar.hasValue()) { + throw new IllegalArgumentException("illegal metadata: " + meta); + } + rank = com.bigdata.bop.Var.var(oVar.getName()); } else if (BD.MIN_RANK.equals(pVal)) { if (oVal == null || !(oVal instanceof Literal)) { throw new IllegalArgumentException("illegal metadata: " + meta); @@ -1893,7 +1900,7 @@ final BOp[] vars = new BOp[] { search, // s = searchVar relevance, // p = relevanceVar - new Constant(DummyIV.INSTANCE), // o = reserved + rank, // o = rankVar new Constant(DummyIV.INSTANCE), // c = reserved }; Modified: branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java =================================================================== --- branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java 2011-06-29 20:01:56 UTC (rev 4818) +++ branches/QUADS_QUERY_BRANCH/bigdata-sails/src/java/com/bigdata/rdf/sail/FreeTextSearchExpander.java 2011-06-29 
21:34:49 UTC (rev 4819) @@ -17,6 +17,7 @@ import com.bigdata.rdf.internal.TermId; import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.XSDDoubleIV; +import com.bigdata.rdf.internal.XSDIntIV; import com.bigdata.rdf.lexicon.ITextIndexer; import com.bigdata.rdf.model.BigdataValue; import com.bigdata.rdf.spo.ISPO; @@ -317,8 +318,8 @@ final ISPO[] spos = new ISPO[hits.length]; for (int i = 0; i < hits.length; i++) { final IV s = new TermId(VTE.LITERAL, hits[i].getDocId()); - final IV p = new XSDDoubleIV(hits[i].getCosine()); - final IV o = null; // reserved + final IV p = new XSDDoubleIV(hits[i].getCosine()); // cosine + final IV o = new XSDIntIV(hits[i].getRank()); // rank final IV c = null; // reserved spos[i] = new SPO(s, p, o, c); if (log.isInfoEnabled()) @@ -333,8 +334,8 @@ for (IHit hit : hits) { final IV s = new TermId(VTE.LITERAL, hit.getDocId()); if (s == boundVal) { - final IV p = new XSDDoubleIV(hit.getCosine()); - final IV o = null; // reserved + final IV p = new XSDDoubleIV(hit.getCosine()); // cosine + final IV o = new XSDIntIV(hit.getRank()); // rank final IV c = null; // reserved result = new ISPO[] { new SPO(s, p, o, c) }; break; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
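An end-to-end usage sketch for the new metadata follows. None of this is in the commit: it assumes an open Sesame RepositoryConnection named cxn against a bigdata store, and it builds the magic predicate URIs from the BD constants rather than hard-coding the search namespace. Per the FreeTextSearchExpander change above, ?score is stamped from the cosine as an xsd:double and ?rank from the hit rank as an xsd:int.

// Hedged sketch only; checked exception handling is omitted for brevity.
final String queryStr =
    "SELECT ?s ?score ?rank " +
    "WHERE { " +
    "  ?s <" + BD.SEARCH.stringValue() + "> \"scale-out RDF triplestore\" . " +
    "  ?s <" + BD.RELEVANCE.stringValue() + "> ?score . " +
    "  ?s <" + BD.RANK.stringValue() + "> ?rank . " +
    "}";

final TupleQuery query =
    cxn.prepareTupleQuery(QueryLanguage.SPARQL, queryStr);

final TupleQueryResult result = query.evaluate();
try {
    while (result.hasNext()) {
        final BindingSet bs = result.next();
        System.out.println(bs.getValue("s")
                + " score=" + bs.getValue("score")
                + " rank=" + bs.getValue("rank"));
    }
} finally {
    result.close();
}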