From: <mrp...@us...> - 2014-05-08 18:33:07
Revision: 8234 http://sourceforge.net/p/bigdata/code/8234 Author: mrpersonick Date: 2014-05-08 18:33:04 +0000 (Thu, 08 May 2014) Log Message: ----------- rolling forward changes backed out from main branch Modified Paths: -------------- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java branches/BLUEPRINTS/build.xml Added Paths: ----------- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java Added: branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java =================================================================== --- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java (rev 0) +++ branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java 2014-05-08 18:33:04 UTC (rev 8234) @@ -0,0 +1,157 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.rdf.sail.webapp; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.log4j.Logger; + +import com.bigdata.blueprints.BigdataGraphBulkLoad; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import com.bigdata.rdf.sail.webapp.client.MiniMime; +import com.bigdata.rdf.store.AbstractTripleStore; +import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; + +/** + * Helper servlet for the blueprints layer. + */ +public class BlueprintsServlet extends BigdataRDFServlet { + + /** + * + */ + private static final long serialVersionUID = 1L; + + static private final transient Logger log = Logger.getLogger(BlueprintsServlet.class); + + static public final List<String> mimeTypes = Arrays.asList(new String[] { + "application/graphml+xml" + }) ; + + /** + * Flag to signify a blueprints operation. + */ + static final transient String ATTR_BLUEPRINTS = "blueprints"; + +// /** +// * Flag to signify a convert operation. POST an RDF document with a +// * content type and an accept header for what it should be converted to. +// */ +// static final transient String ATTR_CONVERT = "convert"; + + + public BlueprintsServlet() { + + } + + /** + * Post a GraphML file to the blueprints layer. + */ + @Override + protected void doPost(final HttpServletRequest req, + final HttpServletResponse resp) throws IOException { + + final long begin = System.currentTimeMillis(); + + final String namespace = getNamespace(req); + + final long timestamp = getTimestamp(req); + + final AbstractTripleStore tripleStore = getBigdataRDFContext() + .getTripleStore(namespace, timestamp); + + if (tripleStore == null) { + /* + * There is no such triple/quad store instance. 
+ */ + buildResponse(resp, HTTP_NOTFOUND, MIME_TEXT_PLAIN); + return; + } + + final String contentType = req.getContentType(); + + if (log.isInfoEnabled()) + log.info("Request body: " + contentType); + + final String mimeType = new MiniMime(contentType).getMimeType().toLowerCase(); + + if (!mimeTypes.contains(mimeType)) { + + buildResponse(resp, HTTP_BADREQUEST, MIME_TEXT_PLAIN, + "Content-Type not recognized as graph data: " + contentType); + + return; + + } + + try { + + BigdataSailRepositoryConnection conn = null; + try { + + conn = getBigdataRDFContext() + .getUnisolatedConnection(namespace); + + final BigdataGraphBulkLoad graph = new BigdataGraphBulkLoad(conn); + + GraphMLReader.inputGraph(graph, req.getInputStream()); + + graph.commit(); + + final long nmodified = graph.getMutationCountLastCommit(); + + final long elapsed = System.currentTimeMillis() - begin; + + reportModifiedCount(resp, nmodified, elapsed); + + return; + + } catch(Throwable t) { + + if(conn != null) + conn.rollback(); + + throw new RuntimeException(t); + + } finally { + + if (conn != null) + conn.close(); + + } + + } catch (Exception ex) { + + // Will be rendered as an INTERNAL_ERROR. + throw new RuntimeException(ex); + + } + + } + +} Property changes on: branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Modified: branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java =================================================================== --- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-08 18:24:19 UTC (rev 8233) +++ branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-08 18:33:04 UTC (rev 8234) @@ -59,6 +59,7 @@ private DeleteServlet m_deleteServlet; private UpdateServlet m_updateServlet; private WorkbenchServlet m_workbenchServlet; + private BlueprintsServlet m_blueprintsServlet; /** * @see <a href="https://sourceforge.net/apps/trac/bigdata/ticket/584"> @@ -84,6 +85,7 @@ m_deleteServlet = new DeleteServlet(); m_describeServlet = new DescribeCacheServlet(); m_workbenchServlet = new WorkbenchServlet(); + m_blueprintsServlet = new BlueprintsServlet(); m_queryServlet.init(getServletConfig()); m_insertServlet.init(getServletConfig()); @@ -91,6 +93,7 @@ m_deleteServlet.init(getServletConfig()); m_describeServlet.init(getServletConfig()); m_workbenchServlet.init(getServletConfig()); + m_blueprintsServlet.init(getServletConfig()); } @@ -130,6 +133,11 @@ m_workbenchServlet = null; } + if (m_blueprintsServlet != null) { + m_blueprintsServlet.destroy(); + m_blueprintsServlet = null; + } + super.destroy(); } @@ -242,6 +250,10 @@ m_workbenchServlet.doPost(req, resp); + } else if (req.getParameter(BlueprintsServlet.ATTR_BLUEPRINTS) != null) { + + m_blueprintsServlet.doPost(req, resp); + } else if (req.getParameter("uri") != null) { // INSERT via w/ URIs Modified: branches/BLUEPRINTS/build.xml =================================================================== --- branches/BLUEPRINTS/build.xml 2014-05-08 18:24:19 UTC (rev 8233) +++ branches/BLUEPRINTS/build.xml 2014-05-08 18:33:04 UTC (rev 8234) @@ -65,6 +65,9 @@ <fileset dir="${bigdata.dir}/bigdata-gom/lib"> <include name="**/*.jar" /> </fileset> + <fileset dir="${bigdata.dir}/bigdata-blueprints/lib"> + <include name="**/*.jar" /> + </fileset> <!-- <fileset 
dir="${bigdata.dir}/ctc-striterator/lib"> <include name="**/*.jar" /> @@ -228,6 +231,7 @@ <src path="${bigdata.dir}/bigdata/src/java" /> <src path="${bigdata.dir}/bigdata-jini/src/java" /> <src path="${bigdata.dir}/bigdata-rdf/src/java" /> + <src path="${bigdata.dir}/bigdata-blueprints/src/java" /> <src path="${bigdata.dir}/bigdata-sails/src/java" /> <src path="${bigdata.dir}/bigdata-gom/src/java" /> <src path="${bigdata.dir}/bigdata-ganglia/src/java" /> @@ -260,6 +264,10 @@ <exclude name="**/*.java" /> <exclude name="**/package.html" /> </fileset> + <fileset dir="${bigdata.dir}/bigdata-blueprints/src/java"> + <exclude name="**/*.java" /> + <exclude name="**/package.html" /> + </fileset> <fileset dir="${bigdata.dir}/bigdata-gom/src/java"> <exclude name="**/*.java" /> <exclude name="**/package.html" /> @@ -313,6 +321,7 @@ <fileset dir="${bigdata.dir}/bigdata-gom/src/java" /> <fileset dir="${bigdata.dir}/bigdata-gom/src/samples" /> <fileset dir="${bigdata.dir}/ctc-striterators/src/java" /> + <fileset dir="${bigdata.dir}/bigdata-blueprints/src/java" /> </jar> </target> @@ -406,7 +415,8 @@ <packageset dir="${bigdata.dir}/bigdata-rdf/src/samples" /> <packageset dir="${bigdata.dir}/bigdata-sails/src/java" /> <packageset dir="${bigdata.dir}/bigdata-sails/src/samples" /> - <packageset dir="${bigdata.dir}/bigdata-gom/src/java" /> + <packageset dir="${bigdata.dir}/bigdata-blueprints/src/java" /> + <packageset dir="${bigdata.dir}/bigdata-gom/src/java" /> <packageset dir="${bigdata.dir}/bigdata-gom/src/samples" /> <packageset dir="${bigdata.dir}/bigdata-gas/src/java" /> <packageset dir="${bigdata.dir}/ctc-striterators/src/java" /> @@ -449,6 +459,9 @@ <fileset dir="${bigdata.dir}/bigdata-gom/lib"> <include name="**/*.jar" /> </fileset> + <fileset dir="${bigdata.dir}/bigdata-blueprints/lib"> + <include name="**/*.jar" /> + </fileset> </copy> <!-- Do NOT flatten the jini jars. We need the to preserve the --> <!-- lib, lib-dl, and lib-ext distinctions. --> @@ -558,6 +571,7 @@ <fileset dir="${bigdata.dir}/bigdata" includes="LEGAL/*"/> <fileset dir="${bigdata.dir}/bigdata-rdf" includes="LEGAL/*"/> <fileset dir="${bigdata.dir}/bigdata-sails" includes="LEGAL/*"/> + <fileset dir="${bigdata.dir}/bigdata-blueprints" includes="LEGAL/*"/> <fileset dir="${bigdata.dir}/bigdata-gom" includes="LEGAL/*"/> <fileset dir="${bigdata.dir}/bigdata-jini" includes="LEGAL/*"/> <!-- bigdata jar plus some dependencies as filtered by autojar. 
@@ -934,6 +948,7 @@ <property name="bigdata-rdf.lib" location="${bigdata.dir}/bigdata-rdf/lib" /> <property name="bigdata-sails.lib" location="${bigdata.dir}/bigdata-sails/lib" /> <property name="bigdata-gom.lib" location="${bigdata.dir}/bigdata-gom/lib" /> + <property name="bigdata-blueprints.lib" location="${bigdata.dir}/bigdata-blueprints/lib" /> <property name="bigdata-jetty.lib" location="${bigdata.dir}/bigdata/lib/jetty" /> <property name="bigdata-http.lib" location="${bigdata.dir}/bigdata-sails/lib/httpcomponents" /> <property name="bigdata-zookeeper.lib" location="${bigdata.dir}/bigdata-jini/lib/apache" /> @@ -976,6 +991,10 @@ <!-- GOM library --> <!-- Note: Nothing yet for GOM --> + <!-- Blueprints library --> + <copy file="${bigdata-blueprints.lib}/blueprints-core-${blueprints.version}.jar" + tofile="${dist.lib}/blueprints-core.jar" /> + <!-- jetty library --> <copy file="${bigdata-jetty.lib}/jetty-continuation-${jetty.version}.jar" tofile="${dist.lib}/jetty-continuation.jar" /> @@ -1387,6 +1406,9 @@ <copy toDir="${build.dir}/bigdata-gom/src"> <fileset dir="${bigdata.dir}/bigdata-gom/src" /> </copy> + <copy toDir="${build.dir}/bigdata-blueprints/src"> + <fileset dir="${bigdata.dir}/bigdata-blueprints/src" /> + </copy> <copy toDir="${build.dir}/bigdata-war/src"> <fileset dir="${bigdata.dir}/bigdata-war/src" /> </copy> @@ -1424,7 +1446,12 @@ <copy toDir="${build.dir}/bigdata-sails/lib"> <fileset dir="${bigdata.dir}/bigdata-sails/lib" /> </copy> + <mkdir dir="${build.dir}/bigdata-blueprints/lib" /> + <copy toDir="${build.dir}/bigdata-blueprints/lib"> + <fileset dir="${bigdata.dir}/bigdata-blueprints/lib" /> + </copy> + <mkdir dir="${build.dir}/src" /> <mkdir dir="${build.dir}/src/resources" /> <mkdir dir="${build.dir}/src/resources/config" /> @@ -1478,6 +1505,7 @@ <include name="bigdata-rdf/src/**" /> <include name="bigdata-sails/src/**" /> <include name="bigdata-gom/src/**" /> + <include name="bigdata-blueprints/src/**" /> <include name="bigdata-war/src/**" /> <include name="ctc-striterators/src/**" /> <include name="lgpl-utils/src/**" /> @@ -1488,6 +1516,7 @@ <include name="bigdata-rdf/lib/**" /> <include name="bigdata-sails/lib/**" /> <include name="bigdata-gom/lib/**" /> + <include name="bigdata-blueprints/lib/**" /> <include name="src/**" /> <exclude name="classes/**" /> <exclude name="${version}.jar" /> @@ -1549,6 +1578,7 @@ <include name="bigdata-rdf/LEGAL/*" /> <include name="bigdata-sails/LEGAL/*" /> <include name="bigdata-gom/LEGAL/*" /> + <include name="bigdata-blueprints/LEGAL/*" /> </fileset> </copy> @@ -1747,6 +1777,8 @@ <property name="sesame-sparql-test.jar" location="${bigdata-sails.lib}/sesame-sparql-testsuite-${sesame.version}.jar" /> <property name="sesame-store-test.jar" location="${bigdata-sails.lib}/sesame-store-testsuite-${sesame.version}.jar" /> <property name="sesame-rio-test.jar" location="${bigdata-sails.lib}/sesame-rio-testsuite-${sesame.version}.jar" /> + <property name="blueprints-test.jar" location="${bigdata-blueprints.lib}/blueprints-test-${blueprints.version}.jar" /> + <property name="jettison.jar" location="${bigdata-blueprints.lib}/jettison-${jettison.version}.jar" /> <property name="classes.test.dir" location="${classes.dir}/test" /> <mkdir dir="${classes.test.dir}" /> @@ -1757,7 +1789,7 @@ <!-- TODO ${path.separator}${dist.lib}/bigdata-gas.jar --> <property name="javac.test.classpath" - 
value="${classes.dir}${path.separator}${junit.jar}${path.separator}${junit-ext.jar}${path.separator}${sesame-sparql-test.jar}${path.separator}${sesame-store-test.jar}${path.separator}${sesame-rio-test.jar}${path.separator}${dist.lib}/classserver.jar${path.separator}${dist.lib}/highscalelib.jar${path.separator}${dist.lib}/dsiutils.jar${path.separator}${dist.lib}/lgplutils.jar${path.separator}${dist.lib}/fastutil.jar${path.separator}${dist.lib}/bigdata-ganglia.jar${path.separator}${dist.lib}/icu4j.jar${path.separator}${dist.lib}/icu4j-charset.jar${path.separator}${dist.lib}/log4j.jar${path.separator}${dist.lib}/lucene-analyzer.jar${path.separator}${dist.lib}/lucene-core.jar${path.separator}${path.separator}${dist.lib}/openrdf-sesame.jar${path.separator}${dist.lib}/slf4j.jar${path.separator}${dist.lib}/jsk-lib.jar${path.separator}${dist.lib}/jsk-platform.jar${path.separator}${dist.lib}/nxparser.jar${path.separator}${dist.lib}/zookeeper.jar${path.separator}${dist.lib}/jetty-continuation.jar${path.separator}${dist.lib}/jetty-http.jar${path.separator}${dist.lib}/jetty-io.jar${path.separator}${dist.lib}/jetty-jmx.jar${path.separator}${dist.lib}/jetty-jndi.jar${path.separator}${dist.lib}/jetty-server.jar${path.separator}${dist.lib}/jetty-util.jar${path.separator}${dist.lib}/jetty-webapp.jar${path.separator}${dist.lib}/jetty-servlet.jar${path.separator}${dist.lib}/jetty-security.jar${path.separator}${dist.lib}/jetty-xml.jar${path.separator}${dist.lib}/jetty-rewrite.jar${path.separator}${dist.lib}/jetty-client.jar${path.separator}${dist.lib}/jetty-proxy.jar${path.separator}${dist.lib}/servlet-api.jar${path.separator}${dist.lib}/commons-codec.jar${path.separator}${dist.lib}/commons-fileupload.jar${path.separator}${dist.lib}/commons-io.jar${path.separator}${dist.lib}/commons-logging.jar${path.separator}${dist.lib}/httpclient.jar${path.separator}${dist.lib}/httpclient-cache.jar${path.separator}${dist.lib}/httpcore.jar${path.separator}${dist.lib}/httpmime.jar" /> + 
value="${classes.dir}${path.separator}${junit.jar}${path.separator}${junit-ext.jar}${path.separator}${sesame-sparql-test.jar}${path.separator}${sesame-store-test.jar}${path.separator}${sesame-rio-test.jar}${path.separator}${dist.lib}/classserver.jar${path.separator}${dist.lib}/highscalelib.jar${path.separator}${dist.lib}/dsiutils.jar${path.separator}${dist.lib}/lgplutils.jar${path.separator}${dist.lib}/fastutil.jar${path.separator}${dist.lib}/bigdata-ganglia.jar${path.separator}${dist.lib}/icu4j.jar${path.separator}${dist.lib}/icu4j-charset.jar${path.separator}${dist.lib}/log4j.jar${path.separator}${dist.lib}/lucene-analyzer.jar${path.separator}${dist.lib}/lucene-core.jar${path.separator}${path.separator}${dist.lib}/openrdf-sesame.jar${path.separator}${dist.lib}/slf4j.jar${path.separator}${dist.lib}/jsk-lib.jar${path.separator}${dist.lib}/jsk-platform.jar${path.separator}${dist.lib}/nxparser.jar${path.separator}${dist.lib}/zookeeper.jar${path.separator}${dist.lib}/jetty-continuation.jar${path.separator}${dist.lib}/jetty-http.jar${path.separator}${dist.lib}/jetty-io.jar${path.separator}${dist.lib}/jetty-jmx.jar${path.separator}${dist.lib}/jetty-jndi.jar${path.separator}${dist.lib}/jetty-server.jar${path.separator}${dist.lib}/jetty-util.jar${path.separator}${dist.lib}/jetty-webapp.jar${path.separator}${dist.lib}/jetty-servlet.jar${path.separator}${dist.lib}/jetty-security.jar${path.separator}${dist.lib}/jetty-xml.jar${path.separator}${dist.lib}/jetty-rewrite.jar${path.separator}${dist.lib}/jetty-client.jar${path.separator}${dist.lib}/jetty-proxy.jar${path.separator}${dist.lib}/servlet-api.jar${path.separator}${dist.lib}/commons-codec.jar${path.separator}${dist.lib}/commons-fileupload.jar${path.separator}${dist.lib}/commons-io.jar${path.separator}${dist.lib}/commons-logging.jar${path.separator}${dist.lib}/httpclient.jar${path.separator}${dist.lib}/httpclient-cache.jar${path.separator}${dist.lib}/httpcore.jar${path.separator}${dist.lib}/httpmime.jar${path.separator}${dist.lib}/blueprints-core.jar${path.separator}${blueprints-test.jar}${path.separator}${jettison.jar}" /> <echo>javac </echo> @@ -1803,6 +1835,7 @@ <src path="${bigdata.dir}/bigdata-rdf/src/test" /> <src path="${bigdata.dir}/bigdata-sails/src/test" /> <src path="${bigdata.dir}/bigdata-gom/src/test" /> + <src path="${bigdata.dir}/bigdata-blueprints/src/test" /> <src path="${bigdata.dir}/bigdata-gas/src/test" /> <src path="${bigdata.dir}/bigdata-ganglia/src/test" /> <src path="${bigdata.dir}/ctc-striterators/src/test" /> @@ -1860,6 +1893,9 @@ <fileset dir="${bigdata.dir}/bigdata-jini/src/test"> <exclude name="**/*.java" /> </fileset> + <fileset dir="${bigdata.dir}/bigdata-blueprints/src/test"> + <exclude name="**/*.java" /> + </fileset> </jar> @@ -2473,7 +2509,7 @@ </java> </target> - <target name="start" depends="compile" description="Start the Bigdata Server."> + <target name="start-bigdata" depends="compile" description="Start the Bigdata Server."> <java classname="com.bigdata.rdf.sail.webapp.NanoSparqlServer" failonerror="true" fork="true" logerror="true"> <classpath refid="runtime.classpath" /> <jvmarg value="-server"/> @@ -2485,5 +2521,93 @@ </java> </target> - + <target name="fetch-gremlin" depends="prepare,compile,jar"> + <echo>Installing Gremlin...</echo> + <get + src="http://www.tinkerpop.com/downloads/gremlin/gremlin-groovy-2.5.0.zip" + dest="${build.dir}/gremlin-groovy-2.5.0.zip"/> + <unzip src="${build.dir}/gremlin-groovy-2.5.0.zip" dest="${build.dir}/"/> + <delete file="${build.dir}/gremlin-groovy-2.5.0.zip"/> + 
</target> + + <target name="install-gremlin" depends="prepare,compile,jar,bundle"> + <delete> + <fileset dir="${build.dir}/gremlin-groovy-2.5.0/lib"> + <include name="blueprints-graph-sail-2.5.0.jar"/> + <include name="blueprints-sail-graph-2.5.0.jar"/> + <include name="jsonld-java-0.3.jar"/> + <include name="jsonld-java-sesame-0.3.jar"/> + <include name="linked-data-sail-1.1.jar"/> + <include name="repository-sail-1.8.jar"/> + <include name="semargl-core-0.4.jar"/> + <include name="semargl-rdf-0.4.jar"/> + <include name="semargl-rdfa-0.4.jar"/> + <include name="semargl-sesame-0.4.jar"/> + <include name="sesame-http-client-2.7.10.jar"/> + <include name="sesame-http-protocol-2.7.10.jar"/> + <include name="sesame-model-2.7.10.jar"/> + <include name="sesame-query-2.7.10.jar"/> + <include name="sesame-queryalgebra-evaluation-2.7.10.jar"/> + <include name="sesame-queryalgebra-model-2.7.10.jar"/> + <include name="sesame-queryparser-api-2.7.10.jar"/> + <include name="sesame-queryparser-serql-2.7.10.jar"/> + <include name="sesame-queryparser-sparql-2.7.10.jar"/> + <include name="sesame-queryresultio-api-2.7.10.jar"/> + <include name="sesame-queryresultio-sparqlxml-2.7.10.jar"/> + <include name="sesame-repository-api-2.7.10.jar"/> + <include name="sesame-repository-sparql-2.7.10.jar"/> + <include name="sesame-rio-api-2.7.10.jar"/> + <include name="sesame-rio-binary-2.7.10.jar"/> + <include name="sesame-rio-datatypes-2.7.10.jar"/> + <include name="sesame-rio-languages-2.7.10.jar"/> + <include name="sesame-rio-n3-2.7.10.jar"/> + <include name="sesame-rio-nquads-2.7.10.jar"/> + <include name="sesame-rio-ntriples-2.7.10.jar"/> + <include name="sesame-rio-rdfjson-2.7.10.jar"/> + <include name="sesame-rio-rdfxml-2.7.10.jar"/> + <include name="sesame-rio-trig-2.7.10.jar"/> + <include name="sesame-rio-trix-2.7.10.jar"/> + <include name="sesame-rio-turtle-2.7.10.jar"/> + <include name="sesame-sail-api-2.7.10.jar"/> + <include name="sesame-sail-inferencer-2.7.10.jar"/> + <include name="sesame-sail-memory-2.7.10.jar"/> + <include name="sesame-sail-nativerdf-2.7.10.jar"/> + <include name="sesame-util-2.7.10.jar"/> + <include name="bigdata-*.jar"/> + </fileset> + </delete> + <copy toDir="${build.dir}/gremlin-groovy-2.5.0/lib" flatten="true"> + <!-- + <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> + <include name="openrdf-sesame-${sesame.version}-onejar.jar" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-sails/lib/httpcomponents"> + <include name="httpmime-${apache.httpmime.version}.jar" /> + </fileset> + --> + <fileset dir="${build.dir}/lib"> + <include name="*.jar" /> + </fileset> + <fileset dir="${build.dir}"> + <include name="${version}.jar" /> + </fileset> + </copy> + <chmod file="${build.dir}/gremlin-groovy-2.5.0/bin/gremlin.sh" perm="+x"/> + <echo>Gremlin installation complete. +0. Make sure the bigdata server is running: + > ant start-bigdata +1. Start the gremlin console: + > ./${build.dir}/gremlin-groovy-2.5.0/bin/gremlin.sh +2. Connect to the bigdata server: + gremlin> import com.bigdata.blueprints.* + gremlin> g = BigdataGraphFactory.connect("http://localhost:9999") +3. Don't forget to shut down the connection when you're done: + gremlin> g.shutdown() + </echo> + + </target> + + <target name="gremlin" depends="fetch-gremlin,install-gremlin"> + </target> + </project> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
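For anyone who wants to exercise the new endpoint from this revision, the sketch below posts a GraphML document to the blueprints layer over HTTP. It is a minimal, hypothetical client: the "blueprints" request parameter and the "application/graphml+xml" content type come directly from the RESTServlet and BlueprintsServlet changes above, but the host, port, and servlet path (here http://localhost:9999/bigdata/sparql, based on the NanoSparqlServer address mentioned in the Gremlin notes) are assumptions that may differ in a given deployment.

import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;

public class GraphMLPostExample {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint; adjust host, port, and path for your deployment.
        // The "blueprints" parameter routes the POST to BlueprintsServlet (see RESTServlet.doPost).
        URL url = new URL("http://localhost:9999/bigdata/sparql?blueprints");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        // The servlet only accepts this MIME type (see BlueprintsServlet.mimeTypes).
        conn.setRequestProperty("Content-Type", "application/graphml+xml");
        try (OutputStream out = conn.getOutputStream()) {
            Files.copy(Paths.get("graph-example-1.xml"), out);
        }
        System.out.println("HTTP status: " + conn.getResponseCode());
        try (InputStream in = conn.getInputStream()) {
            // On success the servlet reports the mutation count and elapsed time.
            int b;
            while ((b = in.read()) != -1) {
                System.out.write(b);
            }
            System.out.flush();
        }
    }
}

On success the response body carries the modified-statement count produced by reportModifiedCount; a 400 response means the Content-Type was not recognized as graph data.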
From: <mrp...@us...> - 2014-05-08 18:24:22
Revision: 8233 http://sourceforge.net/p/bigdata/code/8233 Author: mrpersonick Date: 2014-05-08 18:24:19 +0000 (Thu, 08 May 2014) Log Message: ----------- New branch for blueprints. Modified Paths: -------------- branches/BLUEPRINTS/bigdata/src/resources/logging/log4j.properties branches/BLUEPRINTS/bigdata-sails/src/test/com/bigdata/rdf/sail/874.rq branches/BLUEPRINTS/bigdata-sails/src/test/com/bigdata/rdf/sail/TestTicket275.java Added Paths: ----------- branches/BLUEPRINTS/ branches/BLUEPRINTS/bigdata-war/src/WEB-INF/GraphStore.properties branches/BLUEPRINTS/graph-example-1.xml Index: branches/BLUEPRINTS =================================================================== --- branches/BIGDATA_RELEASE_1_3_0 2014-05-08 18:09:18 UTC (rev 8232) +++ branches/BLUEPRINTS 2014-05-08 18:24:19 UTC (rev 8233) Property changes on: branches/BLUEPRINTS ___________________________________________________________________ Added: svn:ignore ## -0,0 +1,31 ## +ant-build +src +bin +bigdata*.jar +ant-release +standalone +test* +countersfinal.xml +events.jnl +.settings +*.jnl +TestInsertRate.out +SYSTAP-BBT-result.txt +U10load+query +*.hprof +com.bigdata.cache.TestHardReferenceQueueWithBatchingUpdates.exp.csv +commit-log.txt +eventLog +dist +bigdata-test +com.bigdata.rdf.stress.LoadClosureAndQueryTest.*.csv +DIST.bigdata-*.tgz +REL.bigdata-*.tgz +queryLog* +queryRunState* +sparql.txt +benchmark +CI +bsbm10-dataset.nt.gz +bsbm10-dataset.nt.zip +benchmark* Added: svn:mergeinfo ## -0,0 +1,20 ## +/branches/BIGDATA_MGC_HA1_HA5:8025-8122 +/branches/BIGDATA_OPENRDF_2_6_9_UPDATE:6769-6785 +/branches/BIGDATA_RELEASE_1_2_0:6766-7380 +/branches/BTREE_BUFFER_BRANCH:2004-2045 +/branches/DEV_BRANCH_27_OCT_2009:2270-2546,2548-2782 +/branches/INT64_BRANCH:4486-4522 +/branches/JOURNAL_HA_BRANCH:2596-4066 +/branches/LARGE_LITERALS_REFACTOR:4175-4387 +/branches/LEXICON_REFACTOR_BRANCH:2633-3304 +/branches/MGC_1_3_0:7609-7752 +/branches/QUADS_QUERY_BRANCH:4525-4531,4550-4584,4586-4609,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801 +/branches/RDR:7665-8159 +/branches/READ_CACHE:7215-7271 +/branches/RWSTORE_1_1_0_DEBUG:5896-5935 +/branches/TIDS_PLUS_BLOBS_BRANCH:4814-4836 +/branches/ZK_DISCONNECT_HANDLING:7465-7484 +/branches/bugfix-btm:2594-3237 +/branches/dev-btm:2574-2730 +/branches/fko:3150-3194 +/trunk:3392-3437,3656-4061 \ No newline at end of property Modified: branches/BLUEPRINTS/bigdata/src/resources/logging/log4j.properties =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/resources/logging/log4j.properties 2014-05-08 18:09:18 UTC (rev 8232) +++ branches/BLUEPRINTS/bigdata/src/resources/logging/log4j.properties 2014-05-08 18:24:19 UTC (rev 8233) @@ -16,7 +16,9 @@ log4j.logger.com.bigdata.rdf.store.DataLoader=INFO log4j.logger.com.bigdata.resources.AsynchronousOverflowTask=INFO -#log4j.logger.com.bigdata.rdf.rio.StatementBuffer=ALL +log4j.logger.com.bigdata.rdf.sail.webapp.HALoadBalancerServlet=ERROR + +#log4j.logger.com.bigdata.blueprints=ALL #log4j.logger.com.bigdata.rdf.sail.TestProvenanceQuery=ALL #log4j.logger.com.bigdata.rdf.sail.TestSids=ALL #log4j.logger.com.bigdata.rdf.sail.ProxyBigdataSailTestCase=ALL Modified: branches/BLUEPRINTS/bigdata-sails/src/test/com/bigdata/rdf/sail/874.rq =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/874.rq 2014-05-08 18:09:18 UTC (rev 8232) +++ 
branches/BLUEPRINTS/bigdata-sails/src/test/com/bigdata/rdf/sail/874.rq 2014-05-08 18:24:19 UTC (rev 8233) @@ -4,14 +4,11 @@ ?user <http://arvados.org/schema/api_token> <token:ckedd> . { ?user <http://arvados.org/schema/user_is_admin> true . - ?s ?p ?o . - FILTER strStarts(str(?s), "http://arvados.org/schema/modified") . } union { - ?user <http://arvados.org/schema/user_is_admin> false . ?user <http://arvados.org/schema/permission/can_read> ?s . +} ?s ?p ?o . FILTER strStarts(str(?s), "http://arvados.org/schema/modified") . -} } \ No newline at end of file Modified: branches/BLUEPRINTS/bigdata-sails/src/test/com/bigdata/rdf/sail/TestTicket275.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/TestTicket275.java 2014-05-08 18:09:18 UTC (rev 8232) +++ branches/BLUEPRINTS/bigdata-sails/src/test/com/bigdata/rdf/sail/TestTicket275.java 2014-05-08 18:24:19 UTC (rev 8233) @@ -95,7 +95,14 @@ RDFFormat.TURTLE); conn.commit(); - final String query = "SELECT ?lookup WHERE { ?lookup <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <os:class/Lookup> . ?lookup <os:prop/lookup/majorType> ?majorType . OPTIONAL{?lookup <os:prop/lookup/minorType> ?minorType}. FILTER(STR(?majorType) = ?argMajorType). FILTER(!bound(?minorType))}"; + final String query = "SELECT ?lookup " + + "WHERE { " + + "?lookup <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <os:class/Lookup> . " + + "?lookup <os:prop/lookup/majorType> ?majorType . " + + "OPTIONAL{?lookup <os:prop/lookup/minorType> ?minorType}. " + + "FILTER(STR(?majorType) = ?argMajorType). " + + "FILTER(!bound(?minorType))" + + "}"; final TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, query); q.setBinding("argMajorType", conn.getValueFactory() Added: branches/BLUEPRINTS/bigdata-war/src/WEB-INF/GraphStore.properties =================================================================== --- branches/BLUEPRINTS/bigdata-war/src/WEB-INF/GraphStore.properties (rev 0) +++ branches/BLUEPRINTS/bigdata-war/src/WEB-INF/GraphStore.properties 2014-05-08 18:24:19 UTC (rev 8233) @@ -0,0 +1,40 @@ +# +# Note: These options are applied when the journal and the triple store are +# first created. + +## +## Journal options. +## + +# The backing file. This contains all your data. You want to put this someplace +# safe. The default locator will wind up in the directory from which you start +# your servlet container. +com.bigdata.journal.AbstractJournal.file=bigdata.jnl + +# The persistence engine. Use 'Disk' for the WORM or 'DiskRW' for the RWStore. +com.bigdata.journal.AbstractJournal.bufferMode=DiskRW + +# Setup for the RWStore recycler rather than session protection. +com.bigdata.service.AbstractTransactionService.minReleaseAge=1 + +com.bigdata.btree.writeRetentionQueue.capacity=4000 +com.bigdata.btree.BTree.branchingFactor=128 + +# 200M initial extent. +com.bigdata.journal.AbstractJournal.initialExtent=209715200 +com.bigdata.journal.AbstractJournal.maximumExtent=209715200 + +## +## Setup for QUADS mode without the full text index. +## +com.bigdata.rdf.sail.truthMaintenance=false +com.bigdata.rdf.store.AbstractTripleStore.quads=false +com.bigdata.rdf.store.AbstractTripleStore.statementIdentifiers=false +com.bigdata.rdf.store.AbstractTripleStore.textIndex=true +com.bigdata.rdf.store.AbstractTripleStore.axiomsClass=com.bigdata.rdf.axioms.NoAxioms + +# Bump up the branching factor for the lexicon indices on the default kb. 
+com.bigdata.namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor=400 + +# Bump up the branching factor for the statement indices on the default kb. +com.bigdata.namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor=1024 Added: branches/BLUEPRINTS/graph-example-1.xml =================================================================== --- branches/BLUEPRINTS/graph-example-1.xml (rev 0) +++ branches/BLUEPRINTS/graph-example-1.xml 2014-05-08 18:24:19 UTC (rev 8233) @@ -0,0 +1,54 @@ +<?xml version="1.0" encoding="UTF-8"?> +<graphml xmlns="http://graphml.graphdrawing.org/xmlns" + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns + http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd"> + <key id="weight" for="edge" attr.name="weight" attr.type="float"/> + <key id="name" for="node" attr.name="name" attr.type="string"/> + <key id="age" for="node" attr.name="age" attr.type="int"/> + <key id="lang" for="node" attr.name="lang" attr.type="string"/> + <graph id="G" edgedefault="directed"> + <node id="1"> + <data key="name">marko</data> + <data key="age">29</data> + </node> + <node id="2"> + <data key="name">vadas</data> + <data key="age">27</data> + </node> + <node id="3"> + <data key="name">lop</data> + <data key="lang">java</data> + </node> + <node id="4"> + <data key="name">josh</data> + <data key="age">32</data> + </node> + <node id="5"> + <data key="name">ripple</data> + <data key="lang">java</data> + </node> + <node id="6"> + <data key="name">peter</data> + <data key="age">35</data> + </node> + <edge id="7" source="1" target="2" label="knows"> + <data key="weight">0.5</data> + </edge> + <edge id="8" source="1" target="4" label="knows"> + <data key="weight">1.0</data> + </edge> + <edge id="9" source="1" target="3" label="created"> + <data key="weight">0.4</data> + </edge> + <edge id="10" source="4" target="5" label="created"> + <data key="weight">1.0</data> + </edge> + <edge id="11" source="4" target="3" label="created"> + <data key="weight">0.4</data> + </edge> + <edge id="12" source="6" target="3" label="created"> + <data key="weight">0.2</data> + </edge> + </graph> +</graphml> \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
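The graph-example-1.xml file added in this revision is the standard TinkerPop sample graph (marko, vadas, lop, josh, ripple, peter), so it doubles as a convenient smoke test for the blueprints layer. As a local illustration only, the sketch below loads it into an in-memory TinkerGraph using the same GraphMLReader call that BlueprintsServlet uses server-side; TinkerGraph is a stand-in for experimentation and is not part of this commit.

import java.io.FileInputStream;
import java.io.InputStream;

import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.tg.TinkerGraph;
import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader;

public class GraphMLLoadExample {
    public static void main(String[] args) throws Exception {
        // In-memory Blueprints graph; the server-side path uses BigdataGraphBulkLoad instead.
        Graph graph = new TinkerGraph();
        try (InputStream in = new FileInputStream("graph-example-1.xml")) {
            // Same reader call as BlueprintsServlet.doPost.
            GraphMLReader.inputGraph(graph, in);
        }
        for (Vertex v : graph.getVertices()) {
            System.out.println(v.getId() + " name=" + v.getProperty("name")
                    + " age=" + v.getProperty("age"));
        }
        graph.shutdown();
    }
}

Run against the sample file this prints six vertices; the "lop" and "ripple" vertices have no age property, so those print null.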
From: <dme...@us...> - 2014-05-08 18:09:21
Revision: 8232 http://sourceforge.net/p/bigdata/code/8232 Author: dmekonnen Date: 2014-05-08 18:09:18 +0000 (Thu, 08 May 2014) Log Message: ----------- Archiving before next final round of updates. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/bigdataHA.erb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Berksfile branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/files/default/test/default_test.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb Added Paths: ----------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile.aws branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/java7.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/RWStore.properties.erb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/log4j.properties.erb Removed Paths: ------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/default.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,6 +1,6 @@ # Who runs bigdata? 
-default['systap-bigdataHA'][:bigdata_user] = "bigdata" -default['systap-bigdataHA'][:bigdata_group] = "bigdata" +default['systap-bigdataHA'][:user] = "bigdata" +default['systap-bigdataHA'][:group] = "bigdata" # Where to find and build bigdata code default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -12,8 +12,8 @@ append true end -user "#{node['systap-bigdataHA'][:bigdata_user]}" do - gid "#{node['systap-bigdataHA'][:bigdata_group]}" +user "#{node['systap-bigdataHA'][:user]}" do + gid "#{node['systap-bigdataHA'][:group]}" supports :manage_home => true shell "/bin/false" home "#{node['systap-bigdataHA'][:fed_dir]}" @@ -25,7 +25,7 @@ user "root" group "root" cwd "#{node['systap-bigdataHA'][:fed_dir]}" - command "chown -R #{node['systap-bigdataHA'][:bigdata_user]}:#{node['systap-bigdataHA'][:bigdata_group]} ." + command "chown -R #{node['systap-bigdataHA'][:user]}:#{node['systap-bigdataHA'][:group]} ." end execute "checkout bigdata from svn repo" do Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/bigdataHA.erb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/bigdataHA.erb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/bigdataHA.erb 2014-05-08 18:09:18 UTC (rev 8232) @@ -7,8 +7,8 @@ # pid file that is written by $binDir/startHAServices. These SHOULD be # absolute path names. 
-BD_USER="<%= node['systap-bigdataHA'][:bigdata_user] %>" -BD_GROUP="<%= node['systap-bigdataHA'][:bigdata_group] %>" +BD_USER="<%= node['systap-bigdataHA'][:user] %>" +BD_GROUP="<%= node['systap-bigdataHA'][:group] %>" binDir=<%= node['systap-bigdataHA'][:fed_dir] %>/bin pidFile=<%= node['systap-bigdataHA'][:fed_dir] %>/var/lock/pid Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Berksfile =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Berksfile 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Berksfile 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,7 +1,7 @@ site :opscode cookbook "apt" -cookbook "java", "~> 1.14.0" +cookbook "java", '~> 1.22.0' cookbook "tomcat" metadata Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,5 +1,7 @@ # -*- mode: ruby -*- # vi: set ft=ruby : +# ENV['VAGRANT_DEFAULT_PROVIDER'] = 'aws' + Vagrant.require_plugin "vagrant-berkshelf" Vagrant.configure("2") do |config| @@ -7,88 +9,73 @@ # options are documented and commented below. For a complete reference, # please see the online documentation at vagrantup.com. - config.vm.hostname = "systap-bigdata" - - # Every Vagrant virtual environment requires a box to build off of. config.vm.box = "precise64" + config.vm.hostname = "bigdata" - # The url from where the 'config.vm.box' box will be fetched if it - # doesn't already exist on the user's system. - # config.vm.box_url = "http://repo.px.net/vagrant/#{config.vm.box}.box" - config.vm.box_url = "http://files.vagrantup.com/precise64.box" + config.berkshelf.enabled = true - # Assign this VM to a host-only network IP, allowing you to access it - # via the IP. Host-only networks can talk to the host machine as well as - # any other machines on the same network, but cannot be accessed (through this - # network interface) by any external networks. - config.vm.network :private_network, ip: "33.33.33.10" + # An array of symbols representing groups of cookbook described in the Vagrantfile + # to exclusively install and copy to Vagrant's shelf. + # config.berkshelf.only = [] - # Create a public network, which generally matched to bridged network. - # Bridged networks make the machine appear as another physical device on - # your network. + # An array of symbols representing groups of cookbook described in the Vagrantfile + # to skip installing and copying to Vagrant's shelf. + # config.berkshelf.except = [] - # config.vm.network :public_network + config.vm.provider :virtualbox do |vb| + vb.vm.box_url = "http://files.vagrantup.com/precise64.box" - # Create a forwarded port mapping which allows access to a specific port - # within the machine from a port on the host machine. In the example below, - # accessing "localhost:8080" will access port 80 on the guest machine. + vb.vm.network :private_network, ip: "33.33.33.10" + end - # Share an additional folder to the guest VM. The first argument is - # the path on the host to the actual folder. 
The second argument is - # the path on the guest to mount the folder. And the optional third - # argument is a set of non-required options. - # config.vm.synced_folder "../data", "/vagrant_data" - # Provider-specific configuration so you can fine-tune various - # backing providers for Vagrant. These expose provider-specific options. - # Example for VirtualBox: - # - # config.vm.provider :virtualbox do |vb| - # # Don't boot with headless mode - # vb.gui = true - # - # # Use VBoxManage to customize the VM. For example to change memory: - # vb.customize ["modifyvm", :id, "--memory", "1024"] - # end - # - # View the documentation for the provider you're using for more - # information on available options. + config.vm.provider :aws do |aws, override| + override.vm.box = "dummy" - # The path to the Berksfile to use with Vagrant Berkshelf - # config.berkshelf.berksfile_path = "./Berksfile" + aws.access_key_id = "AKIAJ26S27XQRS5LFXCQ" + aws.secret_access_key = "BPBric3lzzE9lHV3Hwz+vG9TQ/e1fOugytYz1LFV" + aws.keypair_name = "systap" - # Enabling the Berkshelf plugin. To enable this globally, add this configuration - # option to your ~/.vagrant.d/Vagrantfile file - config.berkshelf.enabled = true + aws.ami = "ami-a73264ce" + + # + # + # + aws.region = "us-east-1" + aws.instance_type = "t1.micro" + aws.security_groups = [ "launch-wizard-4" ] - # An array of symbols representing groups of cookbook described in the Vagrantfile - # to exclusively install and copy to Vagrant's shelf. - # config.berkshelf.only = [] + aws.tags = { + 'Name' => 'Systap Bigdata' + } - # An array of symbols representing groups of cookbook described in the Vagrantfile - # to skip installing and copying to Vagrant's shelf. - # config.berkshelf.except = [] + override.ssh.username = "ubuntu" + override.ssh.private_key_path = "/Users/dmekonnen/.ssh/systap.pem" + end + config.vm.provision :chef_solo do |chef| chef.json = { - "systap-bigdata" => { - # "1.3.0" => { :version => "1.3.5", :url => "http://repo.px.net/software/bigdata-1.3.0.war" - }, - :tomcat => { - :base_version => "7" - }, - :java => { - "install_flavor" => "oracle", - :jdk_version => "7", - :oracle => { 'accept_oracle_download_terms' => true } - } + :bigdata => { + :install_type => "nss" + # , :build_from_svn => true + }, + :java => { + "install_flavor" => "oracle", + :jdk_version => "7", + :oracle => { 'accept_oracle_download_terms' => true } + }, + :tomcat => { + :base_version => "7" + } } + # config.vm.provision :shell, inline: "sudo apt-get update ; sudo curl -L https://www.opscode.com/chef/install.sh | sudo bash" + config.vm.provision :shell, inline: "sudo apt-get update" + chef.run_list = [ - "recipe[apt]", - "recipe[java]", - "recipe[tomcat::default]", - "recipe[systap-bigdata::default]" + "recipe[bigdata::nss]" ] + end end Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile.aws =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile.aws (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/Vagrantfile.aws 2014-05-08 18:09:18 UTC (rev 8232) @@ -0,0 +1,70 @@ +# -*- mode: ruby -*- +# vi: set ft=ruby : +ENV['VAGRANT_DEFAULT_PROVIDER'] = 'aws' + +Vagrant.require_plugin "vagrant-berkshelf" + +Vagrant.configure("2") do |config| + # All Vagrant configuration is done here. The most common configuration + # options are documented and commented below. 
For a complete reference, + # please see the online documentation at vagrantup.com. + + config.vm.box = "dummy" + config.vm.hostname = "systap-bigdata" + + config.berkshelf.enabled = true + + # An array of symbols representing groups of cookbook described in the Vagrantfile + # to exclusively install and copy to Vagrant's shelf. + # config.berkshelf.only = [] + + # An array of symbols representing groups of cookbook described in the Vagrantfile + # to skip installing and copying to Vagrant's shelf. + # config.berkshelf.except = [] + + config.vm.provider :aws do |aws, override| + aws.access_key_id = "AKIAJ26S27XQRS5LFXCQ" + aws.secret_access_key = "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" + aws.keypair_name = "systap" + + aws.ami = "ami-a73264ce" + + # + # + # + aws.region = "us-east-1" + aws.instance_type = "t1.micro" + aws.security_groups = [ "launch-wizard-4" ] + + aws.tags = { + 'Name' => 'Systap Bigdata' + } + + override.ssh.username = "ubuntu" + override.ssh.private_key_path = "/Users/dmekonnen/.ssh/systap.pem" + end + + config.vm.provision :chef_solo do |chef| + chef.json = { + :bigdata => { + :install_type => "nss" + }, + :java => { + "install_flavor" => "oracle", + :jdk_version => "7", + :oracle => { 'accept_oracle_download_terms' => true } + }, + :tomcat => { + :base_version => "7" + } + } + + config.vm.provision :shell, inline: "sudo apt-get update ; sudo curl -L https://www.opscode.com/chef/install.sh | sudo bash" + # config.vm.provision :shell, inline: "sudo apt-get update" + + chef.run_list = [ + "recipe[bigdata::nss]" + ] + + end +end Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,6 +1,80 @@ -default['tomcat']['base_version'] = 7 -default['systap-bigdata'][:url] = "http://sourceforge.net/projects/bigdata/files/bigdata/1.3.0/bigdata.war/download" -webapp_dir = node['tomcat']['webapp_dir'] -default['systap-bigdata'][:home] = webapp_dir + "/bigdata" -default['systap-bigdata'][:etc] = webapp_dir + "/bigdata/etc" +# default['bigdata'][:url] = "http://sourceforge.net/projects/bigdata/files/bigdata/1.3.0/bigdata.war/download" +default['bigdata'][:url] = "http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war" +default['bigdata'][:home] = "/var/lib/bigdata" + +# Who runs bigdata? 
+default['bigdata'][:user] = "bigdata" +default['bigdata'][:group] = "bigdata" + +default['bigdata'][:properties] = default['bigdata'][:home] + "RWStore.properties" + + +case node['bigdata'][:install_type] +when "nss" + default['bigdata'][:url] = "http://bigdata.com/deploy/bigdata-1.3.0.tgz" + + # Where the jetty resourceBase is defined: + default['bigdata'][:jetty_dir] = node['bigdata'][:home] + "/var/jetty" + + # Where the log files will live: + default['bigdata'][:log_dir] = node['bigdata'][:home] + "/var/log" + + # Where the bigdata-ha.jnl file will live: + default['bigdata'][:data_dir] = node['bigdata'][:home] + "/var/data" +when "nss_svn" + default['bigdata'][:url] = "http://bigdata.com/deploy/bigdata-1.3.0.tgz" +when "tomcat_svn" + default['bigdata'][:svn_branch] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" +else + default['tomcat'][:base_version] = 7 + default['tomcat'][:java_options] = "-Djava.awt.headless=true -server -Xmx4G -XX:+UseG1GC" + + default['bigdata'][:web_home] = default['tomcat'][:webapp_dir] + "/bigdata" + default['bigdata'][:web_xml] = default['bigdata'][:web_home] + "/WEB-INF/web.xml" + default['bigdata'][:log4j_properties] = default['bigdata'][:web_home] + "/WEB-INF/classes/log4j.properties" + + # Where the bigdata-ha.jnl file will live: + default['bigdata'][:data_dir] = node['bigdata'][:home] + "/data" + + # Where the log files will live: + default['bigdata'][:log_dir] = node['bigdata'][:home] + "/log" +end + + +############################################################## +# +# Set the RWStore.properties attributes that apply for all +# installation scenarios. +# +############################################################## + + +default['bigdata']['journal.AbstractJournal.bufferMode'] = "DiskRW" + +# Setup for the RWStore recycler rather than session protection. +default['bigdata']['service.AbstractTransactionService.minReleaseAge']= "1" + +default['bigdata']['btree.writeRetentionQueue.capacity'] = "4000" +default['bigdata']['btree.BTree.branchingFactor'] = "128" + +# 200M initial extent. +default['bigdata']['journal.AbstractJournal.initialExtent'] = "209715200" +default['bigdata']['journal.AbstractJournal.maximumExtent'] = "209715200" + +## +## Setup for QUADS mode without the full text index. +## +default['bigdata']['rdf.sail.truthMaintenance'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.quads'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.textIndex'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.axiomsClass'] = "com.bigdata.rdf.axioms.NoAxioms" + +# Bump up the branching factor for the lexicon indices on the default kb. +default['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor'] = "400" + +# Bump up the branching factor for the statement indices on the default kb. 
+default['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor'] = "1024" +default['bigdata']['rdf.sail.bufferCapacity'] = "100000" +# default['bigdata']['rdf.store.AbstractTripleStore.vocabularyClass'] = "" Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/files/default/test/default_test.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/files/default/test/default_test.rb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/files/default/test/default_test.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,6 +1,6 @@ require 'minitest/spec' -describe_recipe 'systap-bigdata::test' do - it "is running the tomcat server" do - service('tomcat').must_be_running +describe_recipe 'bigdata::test' do + it "is running the bigdata server" do + service('bigdataNSS').must_be_running end end Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,10 +1,10 @@ -name 'systap-bigdata' -maintainer 'Daniel Mekonnen' +name 'bigdata' +maintainer 'Daniel Mekonnen' maintainer_email 'daniel<no-spam-at>systap.com' -license 'All rights reserved' -description 'Installs/Configures systap-bigdata' +license 'All rights reserved' +description 'Installs/Configures bigdata' long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) -version '0.1.0' +version '0.1.1' depends 'apt' -depends 'java' +depends 'java', '>= 1.22.0' depends 'tomcat' Deleted: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/default.rb 2014-05-08 17:53:52 UTC (rev 8231) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/default.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -1,60 +0,0 @@ -# -# Cookbook Name:: systap-bigdata -# Recipe:: default -# -# Copyright 2013, Systap -# -# -include_recipe "tomcat" - -# -# Install the WAR file: -# -remote_file "#{node['tomcat']['webapp_dir']}/bigdata.war" do - source node['systap-bigdata'][:url] - owner node['tomcat']['user'] - group node['tomcat']['group'] -end - - -# -# Configuration for Tomcat on Ubuntu: -# -directory node['systap-bigdata'][:etc] do - owner node['tomcat']['user'] - group node['tomcat']['group'] - mode 00755 - action :create - # - # This is a little hackish. We need to wait for tomcat to extract the bigdata.war file before we can modify - # resources within the bigdata folder. We'll attempt to update this to use the chef notification system later. 
- # - retries 3 - retry_delay 10 -end - -execute "set absolute path for RWStore.properties" do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF" - command "sed -i 's|<param-value>../webapps/bigdata/RWStore.properties|<param-value>#{node['systap-bigdata'][:home]}/RWStore.properties|' web.xml" -end - -execute "set path for bigdata.jnl file" do - cwd "#{node['systap-bigdata'][:home]}" - command "sed -i 's|=bigdata.jnl|=#{node['systap-bigdata'][:etc]}/bigdata.jnl|' RWStore.properties" -end - - -execute "set ruleLog in log4j.properties " do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF/classes" - command "sed -i 's|log4j.appender.queryLog.File=queryLog.csv|log4j.appender.queryLog.File=#{node['systap-bigdata'][:etc]}/queryLog.csv|' log4j.properties" -end - -execute "set ruleLog in log4j.properties " do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF/classes" - command "sed -i 's|log4j.appender.ruleLog.File=rules.log|log4j.appender.ruleLog.File=#{node['systap-bigdata'][:etc]}/rules.log|' log4j.properties" -end - -execute "set ruleLog in log4j.properties " do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF/classes" - command "sed -i 's|log4j.appender.queryRunStateLog.File=queryRunStateLog.csv|log4j.appender.queryRunStateLog.File=#{node['systap-bigdata'][:etc]}/queryRunStateLog.csv|' log4j.properties" -end Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/java7.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/java7.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/java7.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -0,0 +1,32 @@ +# http://jamie.mccrindle.org/2013/07/installing-oracle-java-7-using-chef.html +# +# Cookbook Name:: java7 +# Recipe:: default +# + +apt_repository "webupd8team" do + uri "http://ppa.launchpad.net/webupd8team/java/ubuntu" + components ['main'] + distribution node['lsb']['codename'] + keyserver "keyserver.ubuntu.com" + key "EEA14886" + deb_src true +end + +execute "remove openjdk-6" do + command "apt-get -y remove --purge openjdk-6-jdk openjdk-6-jre openjdk-6-jre-headless openjdk-6-jre-lib" +end + + +# could be improved to run only on update +execute "accept-license" do + command "echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections" +end + +package "oracle-java7-installer" do + action :install +end + +package "oracle-java7-set-default" do + action :install +end Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -0,0 +1,85 @@ +include_recipe "java" +# +# Only do the following for tomcat install +# +if node['bigdata'][:install_type] == "nss" + + group "bigdata" do + action :create + append true + end + + user "#{node['bigdata'][:user]}" do + gid "#{node['bigdata'][:group]}" + supports :manage_home => true + shell "/bin/false" + home "#{node['bigdata'][:home]}" + system true + action :create + end + + # + # Retrieve the package prepared for Brew: + # + remote_file "/tmp/bigdata.tgz" do + owner 
node['bigdata'][:user] + group node['bigdata'][:group] + source node['bigdata'][:url] + end + + execute "Extract and relocate the bigdata archive" do + cwd "/var/lib" + command "tar xvf /tmp/bigdata.tgz" + end + + + execute "change the ownership of the bigdata home directory to bigdata, which strangely is not" do + user "root" + group "root" + cwd "#{node['bigdata'][:home]}" + command "chown -R #{node['bigdata'][:user]}:#{node['bigdata'][:group]} ." + end + + link "/etc/init.d/bigdataNSS" do + to "#{node['bigdata'][:home]}/bin/bigdata" + end + + # + # We shell out to make template substitutions + # + execute "set the INSTALL_TYPE in bin/bigdata" do + cwd "#{node['bigdata'][:home]}/bin" + command "sed -i 's|<%= INSTALL_TYPE %>|#{node['bigdata'][:install_type]}|' bigdata" + end + + execute "set the BD_HOME in bin/bigdata" do + cwd "#{node['bigdata'][:home]}/bin" + command "sed -i 's|<%= BD_HOME %>|#{node['bigdata'][:home]}|' bigdata" + end + + execute "set the BD_HOME in RWStore.properties" do + cwd "#{node['bigdata'][:jetty_dir]}/WEB-INF" + command "sed -i 's|<%= BD_HOME %>|#{node['bigdata'][:home]}|' RWStore.properties" + end + + execute "set the BD_HOME in log4j.properties" do + cwd "#{node['bigdata'][:jetty_dir]}/WEB-INF/classes" + command "sed -i 's|<%= BD_HOME %>|#{node['bigdata'][:home]}|' log4j.properties" + end + + execute "set the JETTY_DIR in jetty.xml" do + cwd "#{node['bigdata'][:jetty_dir]}/etc/" + command "sed -i 's|<%= JETTY_DIR %>|#{node['bigdata'][:jetty_dir]}|' jetty.xml" + end + + service "bigdataNSS" do + # + # Reenable this when the bin/bigdata script is updated to return a "1" for a successful status: + # + # See: http://comments.gmane.org/gmane.comp.sysutils.chef.user/2723 + # + # supports :status => true, :start => true, :stop => true, :restart => true + supports :start => true, :stop => true, :restart => true + action [ :start, :enable ] + end +end Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb 2014-05-08 18:09:18 UTC (rev 8232) @@ -0,0 +1,106 @@ +# +# Cookbook Name:: bigdata +# Recipe:: default +# +include_recipe "java" +include_recipe "tomcat" + +# include_attributes "bigdata::tomcat" + +# +# Only do the following for tomcat install +# +if node['bigdata'][:install_type] == "tomcat" + # + # The tomcat cookbook provides an /etc/default/tomcat7 file that contains multiple JAVA_OPTS lines but allows you to + # modify only one of them during installation. As a consequence JAVA_OPTS conflicts may occur. We comment out the + # 2nd JAVA_OPTS line to avoid the potential for any conflicts (which do occur with our default java_options attribute). + # + # Conflicting collector combinations in option list; please refer to the release notes for the combinations allowed + # Error: Could not create the Java Virtual Machine. 
+ # + execute "comment out 2nd JAVA_OPTS line in /etc/default/tomcat7" do + cwd "/etc/default" + command "sed -i 's|JAVA_OPTS=\"${JAVA_OPTS} -XX:+UseConcMarkSweepGC\"|#JAVA_OPTS=\"${JAVA_OPTS} -XX:+UseConcMarkSweepGC\"|' tomcat7" + end + + + # + # Install the WAR file: + # + remote_file "#{node['tomcat'][:webapp_dir]}/bigdata.war" do + source node['bigdata'][:url] + owner node['tomcat'][:user] + group node['tomcat'][:group] + end + + + # + # Create the JNL home directory + # + directory node['bigdata'][:data_dir] do + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00755 + action :create + recursive true + end + + + # + # Create the Bigdata log home + # + directory node['bigdata'][:log_dir] do + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00755 + action :create + recursive true + end + + + # + # Install the RWStore.properties file: + # + template node['bigdata'][:properties] do + source "RWStore.properties.erb" + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00644 + end + + + # + # Install the log4j.properties file: + # + template node['bigdata'][:log4j_properties] do + source "log4j.properties.erb" + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00644 + retry_delay 10 + retries 3 + end + + + # + # The RWStore.properties path is the only property that needs to be adjusted in the web.xml file. + # Using a sed command to adjust the property avoids the need to maintain a web.xml template which + # in turn updates frequently relative to the other property files. Thus this recipe becomes + # suitable against a larger range of bigdata releases. + # + execute "set absolute path for RWStore.properties" do + cwd "#{node['bigdata'][:web_home]}/WEB-INF" + command "sed -i 's|<param-value>../webapps/bigdata/RWStore.properties|<param-value>#{node['bigdata'][:home]}/RWStore.properties|' web.xml" + end + + + # + # Delete all log files so that the error and warning messages that appeared during the installation + # process do not unnecessarily alarm anyone. + # + execute "remove log files before retart" do + cwd "#{node['tomcat'][:log_dir]}" + command "rm *" + end +end Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/RWStore.properties.erb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/RWStore.properties.erb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/RWStore.properties.erb 2014-05-08 18:09:18 UTC (rev 8232) @@ -0,0 +1,42 @@ +# +# Note: These options are applied when the journal and the triple store are +# first created. + +## +## Journal options. +## + +# The backing file. This contains all your data. You want to put this someplace +# safe. The default locator will wind up in the directory from which you start +# your servlet container. +com.bigdata.journal.AbstractJournal.file=<%= node['bigdata'][:data_dir] %>/bigdata.jnl + +# The persistence engine. Use 'Disk' for the WORM or 'DiskRW' for the RWStore. +com.bigdata.journal.AbstractJournal.bufferMode=<%= node['bigdata']['journal.AbstractJournal.bufferMode'] %> + +# Setup for the RWStore recycler rather than session protection. 
+com.bigdata.service.AbstractTransactionService.minReleaseAge=<%= node['bigdata']['service.AbstractTransactionService.minReleaseAge'] %> + +com.bigdata.btree.writeRetentionQueue.capacity=<%= node['bigdata']['btree.writeRetentionQueue.capacity'] %> +com.bigdata.btree.BTree.branchingFactor=<%= node['bigdata']['btree.BTree.branchingFactor'] %> + +# 200M initial extent. +com.bigdata.journal.AbstractJournal.initialExtent=<%= node['bigdata']['journal.AbstractJournal.initialExtent'] %> +com.bigdata.journal.AbstractJournal.maximumExtent=<%= node['bigdata']['journal.AbstractJournal.maximumExtent'] %> + +## +## Setup for QUADS mode without the full text index. +## +com.bigdata.rdf.sail.truthMaintenance=<%= node['bigdata']['rdf.sail.truthMaintenance'] %> +com.bigdata.rdf.store.AbstractTripleStore.quads=<%= node['bigdata']['rdf.store.AbstractTripleStore.quads'] %> +com.bigdata.rdf.store.AbstractTripleStore.statementIdentifiers=<%= node['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers'] %> +com.bigdata.rdf.store.AbstractTripleStore.textIndex=<%= node['bigdata']['rdf.store.AbstractTripleStore.textIndex'] %> +com.bigdata.rdf.store.AbstractTripleStore.axiomsClass=<%= node['bigdata']['rdf.store.AbstractTripleStore.axiomsClass'] %> + +# Bump up the branching factor for the lexicon indices on the default kb. +com.bigdata.namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor=<%= node['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor'] %> + +# Bump up the branching factor for the statement indices on the default kb. +com.bigdata.namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor=<%= node['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor'] %> +com.bigdata.rdf.sail.bufferCapacity=<%= node['bigdata']['rdf.sail.sailBufferCapacity'] %> +# com.bigdata.rdf.store.AbstractTripleStore.vocabularyClass=<%= node['bigdata']['rdf.store.AbstractTripleStore.vocabularyClass'] %> Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/log4j.properties.erb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/log4j.properties.erb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/default/log4j.properties.erb 2014-05-08 18:09:18 UTC (rev 8232) @@ -0,0 +1,98 @@ +# Default log4j configuration. See the individual classes for the +# specific loggers, but generally they are named for the class in +# which they are defined. + +# Default log4j configuration for testing purposes. +# +# You probably want to set the default log level to ERROR. +# +#log4j.rootCategory=WARN, dest1 +#log4j.rootCategory=WARN, dest2 +log4j.rootCategory=WARN, file + +# Loggers. +# Note: logging here at INFO or DEBUG will significantly impact throughput! +log4j.logger.com.bigdata=WARN +log4j.logger.com.bigdata.btree=WARN +log4j.logger.com.bigdata.counters.History=ERROR +log4j.logger.com.bigdata.counters.XMLUtility$MyHandler=ERROR +log4j.logger.com.bigdata.counters.query.CounterSetQuery=INFO +log4j.logger.com.bigdata.journal.CompactTask=INFO +log4j.logger.com.bigdata.relation.accesspath.BlockingBuffer=ERROR +log4j.logger.com.bigdata.rdf.load=INFO +log4j.logger.com.bigdata.resources.AsynchronousOverflowTask=INFO + +# Normal data loader (single threaded). 
+# log4j.logger.com.bigdata.rdf.store.DataLoader=INFO + + +# file +log4j.appender.file=org.apache.log4j.RollingFileAppender +log4j.appender.file.File=<%= node['bigdata'][:log_dir] %>/bigdata.log +log4j.appender.file.MaxFileSize=4MB +log4j.appender.file.MaxBackupIndex=10 +log4j.appender.file.layout=org.apache.log4j.PatternLayout +log4j.appender.file.layout.ConversionPattern=%d{MMM dd, yyyy HH:mm:ss} %-5p: %F:%L: %m%n + +# dest1 +log4j.appender.dest1=org.apache.log4j.ConsoleAppender +log4j.appender.dest1.layout=org.apache.log4j.PatternLayout +log4j.appender.dest1.layout.ConversionPattern=%d{MMM dd, yyyy HH:mm:ss} %-5p: %F:%L: %m%n +#log4j.appender.dest1.layout.ConversionPattern=%-5p: %r %l: %m%n +#log4j.appender.dest1.layout.ConversionPattern=%-5p: %m%n +#log4j.appender.dest1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n +#log4j.appender.dest1.layout.ConversionPattern=%-4r(%d) [%t] %-5p %c(%l:%M) %x - %m%n + +## dest2 includes the thread name and elapsed milliseconds. +## Note: %r is elapsed milliseconds. +## Note: %t is the thread name. +## See http://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/PatternLayout.html +#log4j.appender.dest2=org.apache.log4j.ConsoleAppender +#log4j.appender.dest2.layout=org.apache.log4j.PatternLayout +#log4j.appender.dest2.layout.ConversionPattern=%-5p: %r %X{hostname} %X{serviceUUID} %X{taskname} %X{timestamp} %X{resources} %t %l: %m%n +# +### +## Rule execution log. This is a formatted log file (comma delimited). +log4j.logger.com.bigdata.relation.rule.eval.RuleLog=INFO,ruleLog +log4j.additivity.com.bigdata.relation.rule.eval.RuleLog=false +log4j.appender.ruleLog=org.apache.log4j.FileAppender +log4j.appender.ruleLog.Threshold=ALL +log4j.appender.ruleLog.File=rules.log +log4j.appender.ruleLog.File=<%= node['bigdata'][:log_dir] %>/rules.log +log4j.appender.ruleLog.Append=true +## I find that it is nicer to have this unbuffered since you can see what +## is going on and to make sure that I have complete rule evaluation logs +## on shutdown. +log4j.appender.ruleLog.BufferedIO=false +log4j.appender.ruleLog.layout=org.apache.log4j.PatternLayout +log4j.appender.ruleLog.layout.ConversionPattern=%m +# +### +## Summary query evaluation log (tab delimited file). Uncomment the next line to enable +##log4j.logger.com.bigdata.bop.engine.QueryLog=INFO,queryLog +#log4j.additivity.com.bigdata.bop.engine.QueryLog=false +#log4j.appender.queryLog=org.apache.log4j.FileAppender +#log4j.appender.queryLog.Threshold=ALL +#log4j.appender.queryLog.File=<%= node['bigdata'][:log_dir] %>/queryLog.csv +#log4j.appender.queryLog.Append=true +## I find that it is nicer to have this unbuffered since you can see what +## is going on and to make sure that I have complete rule evaluation logs +## on shutdown. +#log4j.appender.queryLog.BufferedIO=false +#log4j.appender.queryLog.layout=org.apache.log4j.PatternLayout +#log4j.appender.queryLog.layout.ConversionPattern=%m +# +### +## BOp run state trace (tab delimited file). Uncomment the next line to enable. 
+##log4j.logger.com.bigdata.bop.engine.RunState$TableLog=INFO,queryRunStateLog
+#log4j.additivity.com.bigdata.bop.engine.RunState$TableLog=false
+#log4j.appender.queryRunStateLog=org.apache.log4j.FileAppender
+#log4j.appender.queryRunStateLog.Threshold=ALL
+#log4j.appender.queryRunStateLog.File=<%= node['bigdata'][:log_dir] %>/queryRunState.log
+#log4j.appender.queryRunStateLog.Append=true
+## I find that it is nicer to have this unbuffered since you can see what
+## is going on and to make sure that I have complete rule evaluation logs
+## on shutdown.
+#log4j.appender.queryRunStateLog.BufferedIO=false
+#log4j.appender.queryRunStateLog.layout=org.apache.log4j.PatternLayout
+#log4j.appender.queryRunStateLog.layout.ConversionPattern=%m
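The two .erb templates added in this commit are rendered by Chef's template resource, which evaluates each <%= node[...] %> expression against the node's attributes before writing the concrete RWStore.properties and log4j.properties files (tomcat.rb installs them this way, while nss.rb patches the pre-built archive with sed instead). As a rough illustration of that substitution, the following standalone Ruby sketch renders a few placeholder lines taken from the templates above; the plain hash and its attribute values stand in for the real Chef node object and are invented for the example.

require 'erb'

# Stand-in for the Chef node object. These attribute values are
# hypothetical; the real ones come from the cookbook's attribute files.
node = {
  'bigdata' => {
    :data_dir => '/var/lib/bigdata/data',
    :log_dir  => '/var/log/bigdata',
    'journal.AbstractJournal.bufferMode' => 'DiskRW'
  }
}

# A few placeholder lines lifted from RWStore.properties.erb and
# log4j.properties.erb in the diff above.
template = <<~TPL
  com.bigdata.journal.AbstractJournal.file=<%= node['bigdata'][:data_dir] %>/bigdata.jnl
  com.bigdata.journal.AbstractJournal.bufferMode=<%= node['bigdata']['journal.AbstractJournal.bufferMode'] %>
  log4j.appender.file.File=<%= node['bigdata'][:log_dir] %>/bigdata.log
TPL

# ERB evaluates the <%= ... %> expressions against the local binding and
# prints the expanded properties text that the recipe would install.
puts ERB.new(template).result(binding)

Run with a plain `ruby` interpreter, this prints the fully substituted property lines; inside the cookbook the same expansion happens when the template resources in tomcat.rb are converged.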
From: <mrp...@us...> - 2014-05-08 17:53:55
Revision: 8231 http://sourceforge.net/p/bigdata/code/8231 Author: mrpersonick Date: 2014-05-08 17:53:52 +0000 (Thu, 08 May 2014) Log Message: ----------- rolling back changes to build.xml and RESTServlet from r8223 Revision Links: -------------- http://sourceforge.net/p/bigdata/code/8223 Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java branches/BIGDATA_RELEASE_1_3_0/build.xml Removed Paths: ------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java 2014-05-08 03:12:55 UTC (rev 8230) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java 2014-05-08 17:53:52 UTC (rev 8231) @@ -1,157 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.rdf.sail.webapp; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.apache.log4j.Logger; - -import com.bigdata.blueprints.BigdataGraphBulkLoad; -import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; -import com.bigdata.rdf.sail.webapp.client.MiniMime; -import com.bigdata.rdf.store.AbstractTripleStore; -import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; - -/** - * Helper servlet for the blueprints layer. - */ -public class BlueprintsServlet extends BigdataRDFServlet { - - /** - * - */ - private static final long serialVersionUID = 1L; - - static private final transient Logger log = Logger.getLogger(BlueprintsServlet.class); - - static public final List<String> mimeTypes = Arrays.asList(new String[] { - "application/graphml+xml" - }) ; - - /** - * Flag to signify a blueprints operation. - */ - static final transient String ATTR_BLUEPRINTS = "blueprints"; - -// /** -// * Flag to signify a convert operation. POST an RDF document with a -// * content type and an accept header for what it should be converted to. -// */ -// static final transient String ATTR_CONVERT = "convert"; - - - public BlueprintsServlet() { - - } - - /** - * Post a GraphML file to the blueprints layer. 
- */ - @Override - protected void doPost(final HttpServletRequest req, - final HttpServletResponse resp) throws IOException { - - final long begin = System.currentTimeMillis(); - - final String namespace = getNamespace(req); - - final long timestamp = getTimestamp(req); - - final AbstractTripleStore tripleStore = getBigdataRDFContext() - .getTripleStore(namespace, timestamp); - - if (tripleStore == null) { - /* - * There is no such triple/quad store instance. - */ - buildResponse(resp, HTTP_NOTFOUND, MIME_TEXT_PLAIN); - return; - } - - final String contentType = req.getContentType(); - - if (log.isInfoEnabled()) - log.info("Request body: " + contentType); - - final String mimeType = new MiniMime(contentType).getMimeType().toLowerCase(); - - if (!mimeTypes.contains(mimeType)) { - - buildResponse(resp, HTTP_BADREQUEST, MIME_TEXT_PLAIN, - "Content-Type not recognized as graph data: " + contentType); - - return; - - } - - try { - - BigdataSailRepositoryConnection conn = null; - try { - - conn = getBigdataRDFContext() - .getUnisolatedConnection(namespace); - - final BigdataGraphBulkLoad graph = new BigdataGraphBulkLoad(conn); - - GraphMLReader.inputGraph(graph, req.getInputStream()); - - graph.commit(); - - final long nmodified = graph.getMutationCountLastCommit(); - - final long elapsed = System.currentTimeMillis() - begin; - - reportModifiedCount(resp, nmodified, elapsed); - - return; - - } catch(Throwable t) { - - if(conn != null) - conn.rollback(); - - throw new RuntimeException(t); - - } finally { - - if (conn != null) - conn.close(); - - } - - } catch (Exception ex) { - - // Will be rendered as an INTERNAL_ERROR. - throw new RuntimeException(ex); - - } - - } - -} Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-08 03:12:55 UTC (rev 8230) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-08 17:53:52 UTC (rev 8231) @@ -59,7 +59,6 @@ private DeleteServlet m_deleteServlet; private UpdateServlet m_updateServlet; private WorkbenchServlet m_workbenchServlet; - private BlueprintsServlet m_blueprintsServlet; /** * @see <a href="https://sourceforge.net/apps/trac/bigdata/ticket/584"> @@ -85,7 +84,6 @@ m_deleteServlet = new DeleteServlet(); m_describeServlet = new DescribeCacheServlet(); m_workbenchServlet = new WorkbenchServlet(); - m_blueprintsServlet = new BlueprintsServlet(); m_queryServlet.init(getServletConfig()); m_insertServlet.init(getServletConfig()); @@ -93,7 +91,6 @@ m_deleteServlet.init(getServletConfig()); m_describeServlet.init(getServletConfig()); m_workbenchServlet.init(getServletConfig()); - m_blueprintsServlet.init(getServletConfig()); } @@ -133,11 +130,6 @@ m_workbenchServlet = null; } - if (m_blueprintsServlet != null) { - m_blueprintsServlet.destroy(); - m_blueprintsServlet = null; - } - super.destroy(); } @@ -250,10 +242,6 @@ m_workbenchServlet.doPost(req, resp); - } else if (req.getParameter(BlueprintsServlet.ATTR_BLUEPRINTS) != null) { - - m_blueprintsServlet.doPost(req, resp); - } else if (req.getParameter("uri") != null) { // INSERT via w/ URIs Modified: branches/BIGDATA_RELEASE_1_3_0/build.xml =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/build.xml 2014-05-08 03:12:55 UTC (rev 8230) +++ 
branches/BIGDATA_RELEASE_1_3_0/build.xml 2014-05-08 17:53:52 UTC (rev 8231) @@ -46,131 +46,128 @@ <project name="bigdata" default="bundleJar" basedir="."> - <property file="build.properties" /> + <property file="build.properties" /> - <!-- build-time classpath. --> - <path id="build.classpath"> - <fileset dir="${bigdata.dir}/bigdata/lib"> - <include name="**/*.jar" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-jini/lib"> - <include name="**/*.jar" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> - <include name="**/*.jar" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-sails/lib"> - <include name="**/*.jar" /> - </fileset> + <!-- build-time classpath. --> + <path id="build.classpath"> + <fileset dir="${bigdata.dir}/bigdata/lib"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-jini/lib"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-sails/lib"> + <include name="**/*.jar" /> + </fileset> <fileset dir="${bigdata.dir}/bigdata-gom/lib"> <include name="**/*.jar" /> </fileset> - <fileset dir="${bigdata.dir}/bigdata-blueprints/lib"> - <include name="**/*.jar" /> - </fileset> - <!-- - <fileset dir="${bigdata.dir}/ctc-striterator/lib"> - <include name="**/*.jar" /> - </fileset> --> - </path> + <!-- + <fileset dir="${bigdata.dir}/ctc-striterator/lib"> + <include name="**/*.jar" /> + </fileset> --> + </path> - <!-- runtime classpath w/o install. --> - <path id="runtime.classpath"> - <pathelement location="${build.dir}/classes" /> - <path refid="build.classpath" /> - </path> + <!-- runtime classpath w/o install. --> + <path id="runtime.classpath"> + <pathelement location="${build.dir}/classes" /> + <path refid="build.classpath" /> + </path> - <!-- classpath as installed. --> - <!-- @todo .so and .dll --> - <path id="install.classpath"> - <fileset dir="${install.lib.dir}"> - <include name="**/*.jar" /> - </fileset> - </path> + <!-- classpath as installed. --> + <!-- @todo .so and .dll --> + <path id="install.classpath"> + <fileset dir="${install.lib.dir}"> + <include name="**/*.jar" /> + </fileset> + </path> - <target name="clean" description="cleans everything in [build.dir]."> - <delete dir="${build.dir}" /> - <delete dir="${bigdata.dir}/bigdata-test" quiet="true" /> - <delete dir="${bigdata.dir}/dist" quiet="true" /> - </target> + <target name="clean" description="cleans everything in [build.dir]."> + <delete dir="${build.dir}" /> + <delete dir="${bigdata.dir}/bigdata-test" quiet="true" /> + <delete dir="${bigdata.dir}/dist" quiet="true" /> + </target> - <target name="prepare"> - <!-- setup ${version} for regular or snapshot. --> - <tstamp> - <format property="today" pattern="yyyyMMdd" locale="en,US" /> - <format property="osgiDate" pattern="yyyyMMdd" locale="en,US" /> - </tstamp> + <target name="prepare"> + <!-- setup ${version} for regular or snapshot. 
--> + <tstamp> + <format property="today" pattern="yyyyMMdd" locale="en,US" /> + <format property="osgiDate" pattern="yyyyMMdd" locale="en,US" /> + </tstamp> <condition property="client-version" value="bigdata-client-${build.ver}-${today}" else="bigdata-client-${build.ver}"> <istrue value="${snapshot}" /> </condition> - <condition property="version" value="bigdata-${build.ver}-${today}" else="bigdata-${build.ver}"> - <istrue value="${snapshot}" /> - </condition> - <condition property="osgi.version" value="${build.ver.osgi}.${osgiDate}" else="${build.ver.osgi}.0"> - <istrue value="${snapshot}" /> - </condition> - <!--<echo message="today=${today}"/>--> - <echo message="version=${version}" /> - <available property="svn.checkout" file="./.svn/entries"/> - <echo message="svn.checkout=${svn.checkout}" /> - <!-- create directories. --> - <mkdir dir="${build.dir}" /> - <mkdir dir="${build.dir}/classes" /> - <mkdir dir="${build.dir}/docs" /> - <mkdir dir="${build.dir}/lib" /> - </target> + <condition property="version" value="bigdata-${build.ver}-${today}" else="bigdata-${build.ver}"> + <istrue value="${snapshot}" /> + </condition> + <condition property="osgi.version" value="${build.ver.osgi}.${osgiDate}" else="${build.ver.osgi}.0"> + <istrue value="${snapshot}" /> + </condition> + <!--<echo message="today=${today}"/>--> + <echo message="version=${version}" /> + <available property="svn.checkout" file="./.svn/entries"/> + <echo message="svn.checkout=${svn.checkout}" /> + <!-- create directories. --> + <mkdir dir="${build.dir}" /> + <mkdir dir="${build.dir}/classes" /> + <mkdir dir="${build.dir}/docs" /> + <mkdir dir="${build.dir}/lib" /> + </target> - <target name="buildinfo" depends="prepare" if="svn.checkout" - description="Generate a BuildInfo.java file with metadata about this build."> - <property name="buildinfo.file" - value="${bigdata.dir}\bigdata\src\java\com\bigdata\BuildInfo.java"/> - <loadfile property="svn.revision" srcFile="./.svn/entries"> - <filterchain> - <headfilter lines="1" skip="3"/> - <striplinebreaks/> - </filterchain> - </loadfile> - <loadfile property="svn.url" srcFile="./.svn/entries"> - <filterchain> - <headfilter lines="1" skip="4"/> - <striplinebreaks/> - </filterchain> - </loadfile> - <tstamp> - <format property="build.timestamp" pattern="yyyy/MM/dd HH:mm:ss z" locale="en,US" /> - </tstamp> - <property environment="env" /> - <echo file="${buildinfo.file}"> + <target name="buildinfo" depends="prepare" if="svn.checkout" + description="Generate a BuildInfo.java file with metadata about this build."> + <property name="buildinfo.file" + value="${bigdata.dir}\bigdata\src\java\com\bigdata\BuildInfo.java"/> + <loadfile property="svn.revision" srcFile="./.svn/entries"> + <filterchain> + <headfilter lines="1" skip="3"/> + <striplinebreaks/> + </filterchain> + </loadfile> + <loadfile property="svn.url" srcFile="./.svn/entries"> + <filterchain> + <headfilter lines="1" skip="4"/> + <striplinebreaks/> + </filterchain> + </loadfile> + <tstamp> + <format property="build.timestamp" pattern="yyyy/MM/dd HH:mm:ss z" locale="en,US" /> + </tstamp> + <property environment="env" /> + <echo file="${buildinfo.file}"> package com.bigdata; public class BuildInfo { public static final String buildVersion="${build.ver}"; public static final String buildVersionOSGI="${build.ver.osgi}"; - public static final String svnRevision="${svn.revision}"; + public static final String svnRevision="${svn.revision}"; public static final String svnURL="${svn.url}"; - public static final String 
buildTimestamp="${build.timestamp}"; - public static final String buildUser="${user.name}"; - public static final String buildHost="${env.COMPUTERNAME}"; - public static final String osArch="${os.arch}"; - public static final String osName="${os.name}"; - public static final String osVersion="${os.version}"; + public static final String buildTimestamp="${build.timestamp}"; + public static final String buildUser="${user.name}"; + public static final String buildHost="${env.COMPUTERNAME}"; + public static final String osArch="${os.arch}"; + public static final String osName="${os.name}"; + public static final String osVersion="${os.version}"; } </echo> - <loadfile property="buildinfo" srcFile="${buildinfo.file}"/> - <echo message="${buildinfo}"/> - </target> - + <loadfile property="buildinfo" srcFile="${buildinfo.file}"/> + <echo message="${buildinfo}"/> + </target> + <!-- Note: I had to explicitly specify the location of the jdepend jar in Preferences => Ant => Runtime in order to get this to work under eclipse. This is odd since eclipse bundles the jar with the ant plugin. - http://www.ryanlowe.ca/blog/archives/001038_junit_ant_task_doesnt_work_in_eclipse.php - - outputfile="${build.dir}/docs/jdepend-report.txt" + http://www.ryanlowe.ca/blog/archives/001038_junit_ant_task_doesnt_work_in_eclipse.php + + outputfile="${build.dir}/docs/jdepend-report.txt" --> <target name="jdepend" depends="jar"> <jdepend format="xml" - outputfile="${build.dir}/docs/jdepend-report.xml"> + outputfile="${build.dir}/docs/jdepend-report.xml"> <exclude name="java.*"/> <exclude name="javax.*"/> <classespath> @@ -185,89 +182,84 @@ <!-- Note: This will (re-)compile the SPARQL grammar. Compilation is --> <!-- fast, but javacc must be installed. --> <target name="javacc" depends="prepare" - description="Compile the SPARQL grammar."> + description="Compile the SPARQL grammar."> <jjtree - javacchome="${javacc.home}" - target="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/sparql.jjt" - outputfile="sparql.jj" + javacchome="${javacc.home}" + target="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/sparql.jjt" + outputfile="sparql.jj" outputdirectory="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/" /> <javacc - javacchome="${javacc.home}" - target="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/sparql.jj" - outputdirectory="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/" - /> + javacchome="${javacc.home}" + target="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/sparql.jj" + outputdirectory="bigdata-sails/src/java/com/bigdata/rdf/sail/sparql/ast/" + /> </target> - <!-- Note: javac error results often if verbose is disabled. --> - <!-- I was able to perform a build with 1.6.0_07. --> - <!-- I set the target to 1.5 to support deployment on non-1.6 JVMs. 
--> - <target name="compile" depends="prepare, buildinfo" - description="Compile the code base."> - <mkdir dir="${build.dir}" /> - <echo>javac</echo> - <echo> destdir="${build.dir}"</echo> - <echo> fork="yes"</echo> - <echo> memorymaximumsize="1g"</echo> - <echo> debug="yes"</echo> - <echo> debuglevel="${javac.debuglevel}"</echo> - <echo> verbose="${javac.verbose}"</echo> - <echo> encoding="${javac.encoding}"</echo> - <echo> source="${javac.source}"</echo> - <echo> target="${javac.target}"</echo> - <javac classpathref="build.classpath" - destdir="${build.dir}/classes" - fork="yes" - memorymaximumsize="1g" - debug="${javac.debug}" - debuglevel="${javac.debuglevel}" - verbose="${javac.verbose}" - encoding="${javac.encoding}" - source="${javac.source}" - target="${javac.target}" - includeantruntime="false" - > - <!-- note: must also specify -bootclasspath and -extdirs when cross-compiling --> - <src path="${bigdata.dir}/bigdata/src/java" /> - <src path="${bigdata.dir}/bigdata-jini/src/java" /> + <!-- Note: javac error results often if verbose is disabled. --> + <!-- I was able to perform a build with 1.6.0_07. --> + <!-- I set the target to 1.5 to support deployment on non-1.6 JVMs. --> + <target name="compile" depends="prepare, buildinfo" + description="Compile the code base."> + <mkdir dir="${build.dir}" /> + <echo>javac</echo> + <echo> destdir="${build.dir}"</echo> + <echo> fork="yes"</echo> + <echo> memorymaximumsize="1g"</echo> + <echo> debug="yes"</echo> + <echo> debuglevel="${javac.debuglevel}"</echo> + <echo> verbose="${javac.verbose}"</echo> + <echo> encoding="${javac.encoding}"</echo> + <echo> source="${javac.source}"</echo> + <echo> target="${javac.target}"</echo> + <javac classpathref="build.classpath" + destdir="${build.dir}/classes" + fork="yes" + memorymaximumsize="1g" + debug="${javac.debug}" + debuglevel="${javac.debuglevel}" + verbose="${javac.verbose}" + encoding="${javac.encoding}" + source="${javac.source}" + target="${javac.target}" + includeantruntime="false" + > + <!-- note: must also specify -bootclasspath and -extdirs when cross-compiling --> + <src path="${bigdata.dir}/bigdata/src/java" /> + <src path="${bigdata.dir}/bigdata-jini/src/java" /> <src path="${bigdata.dir}/bigdata-rdf/src/java" /> - <src path="${bigdata.dir}/bigdata-blueprints/src/java" /> - <src path="${bigdata.dir}/bigdata-sails/src/java" /> + <src path="${bigdata.dir}/bigdata-sails/src/java" /> <src path="${bigdata.dir}/bigdata-gom/src/java" /> <src path="${bigdata.dir}/bigdata-ganglia/src/java" /> <src path="${bigdata.dir}/bigdata-gas/src/java" /> - <src path="${bigdata.dir}/ctc-striterators/src/java" /> - <!-- Do not include the unit tests @todo conditionally include? + <src path="${bigdata.dir}/ctc-striterators/src/java" /> + <!-- Do not include the unit tests @todo conditionally include? <src path="${bigdata.dir}/bigdata/src/test"/> <src path="${bigdata.dir}/bigdata-jini/src/test"/> <src path="${bigdata.dir}/bigdata-rdf/src/test"/> <src path="${bigdata.dir}/bigdata-sails/src/test"/> --> - <compilerarg value="-version" /> - </javac> - <!-- copy resources. 
--> - <copy toDir="${build.dir}/classes"> - <fileset dir="${bigdata.dir}/bigdata/src/java"> - <exclude name="**/*.java" /> - <exclude name="**/package.html" /> - <exclude name="**/BytesUtil.c" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-jini/src/java"> - <exclude name="**/*.java" /> - <exclude name="**/package.html" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-rdf/src/java"> - <exclude name="**/*.java" /> - <exclude name="**/package.html" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-sails/src/java"> - <exclude name="**/*.java" /> - <exclude name="**/package.html" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-blueprints/src/java"> + <compilerarg value="-version" /> + </javac> + <!-- copy resources. --> + <copy toDir="${build.dir}/classes"> + <fileset dir="${bigdata.dir}/bigdata/src/java"> <exclude name="**/*.java" /> <exclude name="**/package.html" /> + <exclude name="**/BytesUtil.c" /> </fileset> + <fileset dir="${bigdata.dir}/bigdata-jini/src/java"> + <exclude name="**/*.java" /> + <exclude name="**/package.html" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-rdf/src/java"> + <exclude name="**/*.java" /> + <exclude name="**/package.html" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-sails/src/java"> + <exclude name="**/*.java" /> + <exclude name="**/package.html" /> + </fileset> <fileset dir="${bigdata.dir}/bigdata-gom/src/java"> <exclude name="**/*.java" /> <exclude name="**/package.html" /> @@ -276,37 +268,37 @@ <exclude name="**/*.java" /> <exclude name="**/package.html" /> </fileset> - <!-- Note: This simple copy works so long as there is just one service - provider file per interface. It will not combine (append) multiple - files for the same interface. --> - <fileset dir="${bigdata.dir}/bigdata-rdf/src/resources/service-providers"> - <include name="META-INF/**" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-sails/src/resources/sesame-server"> - <include name="META-INF/**" /> - </fileset> - <!-- Copy WAR resources for the embedded NanoSparqlServer. --> - <!-- TODO: This could cause problem since the files exist in --> - <!-- both the JAR and the staged artifact (bigdata/var/jetty). --> - <!-- This makes it difficult to override the ones in the JAR. --> - <!-- See also "run-junit" for an alterative to getting CI to run. --> - <!-- newer approach. --> - <!--fileset dir="${bigdata.dir}/bigdata-war/src"> - <include name="**"/> - </fileset--> - <!-- older approach. --> - <fileset dir="." includes="bigdata-war/src/**"/> - </copy> - </target> + <!-- Note: This simple copy works so long as there is just one service + provider file per interface. It will not combine (append) multiple + files for the same interface. --> + <fileset dir="${bigdata.dir}/bigdata-rdf/src/resources/service-providers"> + <include name="META-INF/**" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-sails/src/resources/sesame-server"> + <include name="META-INF/**" /> + </fileset> + <!-- Copy WAR resources for the embedded NanoSparqlServer. --> + <!-- TODO: This could cause problem since the files exist in --> + <!-- both the JAR and the staged artifact (bigdata/var/jetty). --> + <!-- This makes it difficult to override the ones in the JAR. --> + <!-- See also "run-junit" for an alterative to getting CI to run. --> + <!-- newer approach. --> + <!--fileset dir="${bigdata.dir}/bigdata-war/src"> + <include name="**"/> + </fileset--> + <!-- older approach. --> + <fileset dir="." 
includes="bigdata-war/src/**"/> + </copy> + </target> - <!-- Builds the bigdata JAR and bundles it together with all of its dependencies in the ${build.dir}/lib directory. --> - <target name="bundleJar" depends="clean, bundle, jar" description="Builds the bigdata JAR and bundles it together with all of its dependencies in the ${build.dir}/lib directory."> - <copy file="${build.dir}/${version}.jar" todir="${build.dir}/lib"/> - <!--<property name="myclasspath" refid="runtime.classpath" /> - <echo message="${myclasspath}"/>--> - </target> + <!-- Builds the bigdata JAR and bundles it together with all of its dependencies in the ${build.dir}/lib directory. --> + <target name="bundleJar" depends="clean, bundle, jar" description="Builds the bigdata JAR and bundles it together with all of its dependencies in the ${build.dir}/lib directory."> + <copy file="${build.dir}/${version}.jar" todir="${build.dir}/lib"/> + <!--<property name="myclasspath" refid="runtime.classpath" /> + <echo message="${myclasspath}"/>--> + </target> - <target name="sourceJar" depends="prepare" description="Generates the sources jar."> + <target name="sourceJar" depends="prepare" description="Generates the sources jar."> <jar destfile="${build.dir}/${version}-sources.jar"> <fileset dir="${bigdata.dir}/bigdata/src/java" /> <fileset dir="${bigdata.dir}/bigdata/src/samples" /> @@ -321,192 +313,187 @@ <fileset dir="${bigdata.dir}/bigdata-gom/src/java" /> <fileset dir="${bigdata.dir}/bigdata-gom/src/samples" /> <fileset dir="${bigdata.dir}/ctc-striterators/src/java" /> - <fileset dir="${bigdata.dir}/bigdata-blueprints/src/java" /> </jar> </target> - - <!-- This generates the jar, but does not bundled the dependencies. - See 'bundleJar'. --> - <target name="jar" depends="compile" description="Generates the jar (see also bundleJar)."> - <jar destfile="${build.dir}/${version}.jar"> - <fileset dir="${build.dir}/classes" excludes="test/**" /> - <!-- Copy the copyright top-level NOTICE file. --> - <fileset file="${bigdata.dir}/NOTICE"/> - <!-- Copy the copyright top-level LICENSE file. --> - <fileset file="${bigdata.dir}/LICENSE.txt"/> - <!-- Copy licenses for any project from which have imported something. --> - <fileset dir="${bigdata.dir}/bigdata"> - <include name="LEGAL/apache-license-2_0.txt"/> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-rdf"> - <include name="LEGAL/sesame2.x-license.txt"/> - </fileset> - <manifest> - <!--<attribute name="Main-Class" value="com/bigdata/rdf/rio/TestRioIntegration"/>--> - </manifest> - </jar> - </target> - - <!-- Deploy the JAR to the maven repository. --> + + <!-- This generates the jar, but does not bundled the dependencies. + See 'bundleJar'. --> + <target name="jar" depends="compile" description="Generates the jar (see also bundleJar)."> + <jar destfile="${build.dir}/${version}.jar"> + <fileset dir="${build.dir}/classes" excludes="test/**" /> + <!-- Copy the copyright top-level NOTICE file. --> + <fileset file="${bigdata.dir}/NOTICE"/> + <!-- Copy the copyright top-level LICENSE file. --> + <fileset file="${bigdata.dir}/LICENSE.txt"/> + <!-- Copy licenses for any project from which have imported something. 
--> + <fileset dir="${bigdata.dir}/bigdata"> + <include name="LEGAL/apache-license-2_0.txt"/> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-rdf"> + <include name="LEGAL/sesame2.x-license.txt"/> + </fileset> + <manifest> + <!--<attribute name="Main-Class" value="com/bigdata/rdf/rio/TestRioIntegration"/>--> + </manifest> + </jar> + </target> + + <!-- Deploy the JAR to the maven repository. --> <target name="maven-deploy" depends="jar" - description="Deploy the jar to the maven repository."> - <exec command="${MAVEN_HOME}/bin/mvn"> - <arg value="clean"/> - <arg value="deploy"/> - </exec> - </target> + description="Deploy the jar to the maven repository."> + <exec command="${MAVEN_HOME}/bin/mvn"> + <arg value="clean"/> + <arg value="deploy"/> + </exec> + </target> - <!-- This generates an osgi bundle jar, but does not bundle the dependencies. - See 'bundleJar'. --> - <target name="osgi" depends="compile, bundle" description="Generates the osgi bundle jar (see also bundleJar)."> - <taskdef resource="aQute/bnd/ant/taskdef.properties" classpath="bigdata/lib/bnd-0.0.384.jar" /> - <mkdir dir="${build.dir}/bundles" /> - <jar destfile="${build.dir}/bundles/com.bigdata.source_${osgi.version}.jar"> - <manifest> - <attribute name="Eclipse-SourceBundle" value='com.bigdata;version="${osgi.version}";roots="."' /> - <attribute name="Bundle-Vendor" value="Systap" /> - <attribute name="Bundle-Version" value="${build.ver.osgi}" /> - <attribute name="Bundle-ManifestVersion" value="2" /> - <attribute name="Bundle-SymbolicName" value="com.bigdata.source" /> - <attribute name="Bundle-DocURL" value="http://www.bigdata.com" /> - <attribute name="Bundle-Description" value="Bigdata Source" /> - </manifest> - <fileset dir="bigdata/src/java" /> - <fileset dir="bigdata-jini/src/java" /> + <!-- This generates an osgi bundle jar, but does not bundle the dependencies. + See 'bundleJar'. 
--> + <target name="osgi" depends="compile, bundle" description="Generates the osgi bundle jar (see also bundleJar)."> + <taskdef resource="aQute/bnd/ant/taskdef.properties" classpath="bigdata/lib/bnd-0.0.384.jar" /> + <mkdir dir="${build.dir}/bundles" /> + <jar destfile="${build.dir}/bundles/com.bigdata.source_${osgi.version}.jar"> + <manifest> + <attribute name="Eclipse-SourceBundle" value='com.bigdata;version="${osgi.version}";roots="."' /> + <attribute name="Bundle-Vendor" value="Systap" /> + <attribute name="Bundle-Version" value="${build.ver.osgi}" /> + <attribute name="Bundle-ManifestVersion" value="2" /> + <attribute name="Bundle-SymbolicName" value="com.bigdata.source" /> + <attribute name="Bundle-DocURL" value="http://www.bigdata.com" /> + <attribute name="Bundle-Description" value="Bigdata Source" /> + </manifest> + <fileset dir="bigdata/src/java" /> + <fileset dir="bigdata-jini/src/java" /> <fileset dir="bigdata-rdf/src/java" /> - <fileset dir="bigdata-sails/src/java" /> + <fileset dir="bigdata-sails/src/java" /> <fileset dir="bigdata-gom/src/java" /> - </jar> - <bnd output="${build.dir}/bundles/com.bigata-${osgi.version}.jar" classpath="${build.dir}/classes" eclipse="false" failok="false" exceptions="true" files="${basedir}/osgi/bigdata.bnd" /> + </jar> + <bnd output="${build.dir}/bundles/com.bigata-${osgi.version}.jar" classpath="${build.dir}/classes" eclipse="false" failok="false" exceptions="true" files="${basedir}/osgi/bigdata.bnd" /> - <bndwrap jars="${build.dir}/lib/colt-${colt.version}.jar" output="${build.dir}/bundles/colt-${colt.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/fastutil-${fastutil.version}.jar" output="${build.dir}/bundles/fastutil-${fastutil.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/dsi-utils-${dsiutils.version}.jar" output="${build.dir}/bundles/dsi-utils-${dsiutils.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/lgpl-utils-${lgplutils.version}.jar" output="${build.dir}/bundles/lgpl-utils-${lgplutils.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/high-scale-lib-v${highscalelib.version}.jar" output="${build.dir}/bundles/high-scale-lib-v${highscalelib.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/openrdf-sesame-${sesame.version}-onejar.jar" output="${build.dir}/bundles/openrdf-sesame-${sesame.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/apache/zookeeper-${zookeeper.version}.jar" output="${build.dir}/bundles/zookeeper-${zookeeper.version}.jar" definitions="${basedir}/osgi/" /> - <bndwrap jars="${build.dir}/lib/nxparser-${nxparser.version}.jar" output="${build.dir}/bundles/nxparser-2010.6.22.jar" definitions="${basedir}/osgi/" /> - </target> + <bndwrap jars="${build.dir}/lib/colt-${colt.version}.jar" output="${build.dir}/bundles/colt-${colt.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/fastutil-${fastutil.version}.jar" output="${build.dir}/bundles/fastutil-${fastutil.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/dsi-utils-${dsiutils.version}.jar" output="${build.dir}/bundles/dsi-utils-${dsiutils.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/lgpl-utils-${lgplutils.version}.jar" output="${build.dir}/bundles/lgpl-utils-${lgplutils.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap 
jars="${build.dir}/lib/high-scale-lib-v${highscalelib.version}.jar" output="${build.dir}/bundles/high-scale-lib-v${highscalelib.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/openrdf-sesame-${sesame.version}-onejar.jar" output="${build.dir}/bundles/openrdf-sesame-${sesame.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/apache/zookeeper-${zookeeper.version}.jar" output="${build.dir}/bundles/zookeeper-${zookeeper.version}.jar" definitions="${basedir}/osgi/" /> + <bndwrap jars="${build.dir}/lib/nxparser-${nxparser.version}.jar" output="${build.dir}/bundles/nxparser-2010.6.22.jar" definitions="${basedir}/osgi/" /> + </target> - <!-- Note: the javadoc requires a LOT of RAM, but runs quickly on a - server class machine. - - @todo man page for [bigdata] script to @{build.dir}/docs - - --> - <target name="javadoc" depends="prepare" if="javadoc"> - <mkdir dir="${build.dir}/docs/api" /> - <javadoc destdir="${build.dir}/docs/api" defaultexcludes="yes" - author="true" version="true" use="true" verbose="no" - overview="${bigdata.dir}/overview.html" - windowtitle="bigdata® v${build.ver}" - classpathref="build.classpath" - encoding="utf-8" - private="false" - > - <arg value="-J-Xmx1000m" /> + <!-- Note: the javadoc requires a LOT of RAM, but runs quickly on a + server class machine. + + @todo man page for [bigdata] script to @{build.dir}/docs + + --> + <target name="javadoc" depends="prepare" if="javadoc"> + <mkdir dir="${build.dir}/docs/api" /> + <javadoc destdir="${build.dir}/docs/api" defaultexcludes="yes" + author="true" version="true" use="true" verbose="no" + overview="${bigdata.dir}/overview.html" + windowtitle="bigdata® v${build.ver}" + classpathref="build.classpath" + encoding="utf-8" + private="false" + > + <arg value="-J-Xmx1000m" /> <arg value="-quiet" /> - <packageset dir="${bigdata.dir}/bigdata/src/java" /> - <packageset dir="${bigdata.dir}/bigdata/src/samples" /> - <packageset dir="${bigdata.dir}/bigdata-jini/src/java" /> - <packageset dir="${bigdata.dir}/bigdata-rdf/src/java" /> - <packageset dir="${bigdata.dir}/bigdata-rdf/src/samples" /> - <packageset dir="${bigdata.dir}/bigdata-sails/src/java" /> - <packageset dir="${bigdata.dir}/bigdata-sails/src/samples" /> - <packageset dir="${bigdata.dir}/bigdata-blueprints/src/java" /> - <packageset dir="${bigdata.dir}/bigdata-gom/src/java" /> + <packageset dir="${bigdata.dir}/bigdata/src/java" /> + <packageset dir="${bigdata.dir}/bigdata/src/samples" /> + <packageset dir="${bigdata.dir}/bigdata-jini/src/java" /> + <packageset dir="${bigdata.dir}/bigdata-rdf/src/java" /> + <packageset dir="${bigdata.dir}/bigdata-rdf/src/samples" /> + <packageset dir="${bigdata.dir}/bigdata-sails/src/java" /> + <packageset dir="${bigdata.dir}/bigdata-sails/src/samples" /> + <packageset dir="${bigdata.dir}/bigdata-gom/src/java" /> <packageset dir="${bigdata.dir}/bigdata-gom/src/samples" /> <packageset dir="${bigdata.dir}/bigdata-gas/src/java" /> - <packageset dir="${bigdata.dir}/ctc-striterators/src/java" /> - <doctitle> - <![CDATA[<h1>bigdata® v${build.ver}</h1>]]></doctitle> - <bottom> - <![CDATA[<i>Copyright © 2006-2014 SYSTAP, LLC. 
All Rights Reserved.</i>]]></bottom> - <tag name="todo" scope="all" description="TODO:" /> - <tag name="issue" scope="all" description="ISSUE:" /> - <!--tag name="FIXME" scope="all" description="FIXME:"/--> - <link href="http://download.oracle.com/javase/7/docs/api/" /> - <link href="http://openrdf.callimachus.net/sesame/2.7/apidocs/" /> - <link href="http://lucene.apache.org/java/3_0_0/api/"/> + <packageset dir="${bigdata.dir}/ctc-striterators/src/java" /> + <doctitle> + <![CDATA[<h1>bigdata® v${build.ver}</h1>]]></doctitle> + <bottom> + <![CDATA[<i>Copyright © 2006-2014 SYSTAP, LLC. All Rights Reserved.</i>]]></bottom> + <tag name="todo" scope="all" description="TODO:" /> + <tag name="issue" scope="all" description="ISSUE:" /> + <!--tag name="FIXME" scope="all" description="FIXME:"/--> + <link href="http://download.oracle.com/javase/7/docs/api/" /> + <link href="http://openrdf.callimachus.net/sesame/2.7/apidocs/" /> + <link href="http://lucene.apache.org/java/3_0_0/api/"/> <link href="http://lucene.apache.org/core/old_versioned_docs/versions/3_0_3/api/all/"/> - <link href="http://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/"/> - <link href="http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/"/> - <link href="http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/"/> - <link href="http://hc.apache.org/httpcomponents-client-ga/httpmime/apidocs/"/> - <link href="http://hc.apache.org/httpcomponents-client-ga/httpclient-cache/apidocs/"/> - <link href="http://icu-project.org/apiref/icu4j/"/> - <link href="http://download.eclipse.org/jetty/stable-9/apidocs/"/> + <link href="http://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/"/> + <link href="http://hc.apache.org/httpcomponents-core-ga/httpcore-nio/apidocs/"/> + <link href="http://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/"/> + <link href="http://hc.apache.org/httpcomponents-client-ga/httpmime/apidocs/"/> + <link href="http://hc.apache.org/httpcomponents-client-ga/httpclient-cache/apidocs/"/> + <link href="http://icu-project.org/apiref/icu4j/"/> + <link href="http://download.eclipse.org/jetty/stable-9/apidocs/"/> </javadoc> </target> <target name="bundle" description="Bundles all dependencies for easier deployments and releases (does not bundle the bigdata jar)."> <copy toDir="${build.dir}/lib" flatten="true"> - <fileset dir="${bigdata.dir}/bigdata/lib"> - <include name="**/*.jar" /> - <include name="**/*.so" /> - <include name="**/*.dll" /> - <!-- The BytesUtil JNI class is not recommended at this time (no performance gain). --> - <exclude name="**/*BytesUtil*" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> - <include name="**/*.jar" /> - </fileset> - <fileset dir="${bigdata.dir}/bigdata-sails/lib"> - <include name="**/*.jar" /> - </fileset> + <fileset dir="${bigdata.dir}/bigdata/lib"> + <include name="**/*.jar" /> + <include name="**/*.so" /> + <include name="**/*.dll" /> + <!-- The BytesUtil JNI class is not recommended at this time (no performance gain). --> + <exclude name="**/*BytesUtil*" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> + <include name="**/*.jar" /> + </fileset> + <fileset dir="${bigdata.dir}/bigdata-sails/lib"> + <include name="**/*.jar" /> + </fileset> <fileset dir="${bigdata.dir}/bigdata-gom/lib"> <include name="**/*.jar" /> </fileset> - <fileset dir="${bigdata.dir}/bigdata-blueprints/lib"> - <include name="**/*.jar" /> - </fileset> </copy> <!-- Do NOT flatten the jini jars. 
We need the to preserve the --> <!-- lib, lib-dl, and lib-ext distinctions. --> <copy toDir="${build.dir}/lib" flatten="false"> - <fileset dir="${bigdata.dir}/bigdata-jini/lib"> - <include name="**/*.jar" /> - </fileset> + <fileset dir="${bigdata.dir}/bigdata-jini/lib"> + <include name="**/*.jar" /> + </fileset> </copy> </target> - <!-- - This target produces a new jar which includes everything from the bigdata - jar, the dsi-util jar, the lgpl-utils jar, and exactly those class files - from colt and fastutil which are required by the proceeding jars. The - main advantage of the resulting jar is that the vast majority of fastutil - is not necessary, and it is a 13M jar. - - <target name="autojar" - description="Produce an expanded version of the bigdata jar which - includes the data from the dsi-util and lgpl-utils jars and only - those classes from fastutil and colt which are required to support - bigdata and dsiutil at runtime."> - <java jar="src/build/autojar/autojar.jar" fork="true" failonerror="true"> - <arg line="-o ${build.dir}/bigdataPlus.jar - -c ${bigdata.dir}/bigdata/lib/unimi/fastutil*.jar - -c ${bigdata.dir}/bigdata/lib/unimi/colt*.jar - ${build.dir}/lib/bigdata*.jar - ${bigdata.dir}/bigdata/lib/dsi-util*.jar - ${bigdata.dir}/bigdata/lib/lgpl-utils*.jar - " /> - </java> - </target> --> - <!-- java autojar.jar -vo fastutil-stripped.jar -c fastutil.jar -Y bigdata.jar --> - <target name="autojar-strip-fastutil" depends="prepare" - description="Strip unused classes from fastutil."> - <java jar="src/build/autojar/autojar.jar" fork="true" failonerror="true"> - <arg line="-o ${build.dir}/fastutil-stripped.jar - -c ${bigdata.dir}/bigdata/lib/unimi/fastutil*.jar - -- - -Y ${build.dir}/lib/${version}.jar - -Y ${bigdata.dir}/bigdata/lib/dsi-util*.jar - -Y ${bigdata.dir}/bigdata/lib/lgpl-utils*.jar - " /> - </java> - </target> + <!-- + This target produces a new jar which includes everything from the bigdata + jar, the dsi-util jar, the lgpl-utils jar, and exactly those class files + from colt and fastutil which are required by the proceeding jars. The + main advantage of the resulting jar is that the vast majority of fastutil + is not necessary, and it is a 13M jar. 
+ + <target name="autojar" + description="Produce an expanded version of the bigdata jar which + includes the data from the dsi-util and lgpl-utils jars and only + those classes from fastutil and colt which are required to support + bigdata and dsiutil at runtime."> + <java jar="src/build/autojar/autojar.jar" fork="true" failonerror="true"> + <arg line="-o ${build.dir}/bigdataPlus.jar + -c ${bigdata.dir}/bigdata/lib/unimi/fastutil*.jar + -c ${bigdata.dir}/bigdata/lib/unimi/colt*.jar + ${build.dir}/lib/bigdata*.jar + ${bigdata.dir}/bigdata/lib/dsi-util*.jar + ${bigdata.dir}/bigdata/lib/lgpl-utils*.jar + " /> + </java> + </target> --> + <!-- java autojar.jar -vo fastutil-stripped.jar -c fastutil.jar -Y bigdata.jar --> + <target name="autojar-strip-fastutil" depends="prepare" + description="Strip unused classes from fastutil."> + <java jar="src/build/autojar/autojar.jar" fork="true" failonerror="true"> + <arg line="-o ${build.dir}/fastutil-stripped.jar + -c ${bigdata.dir}/bigdata/lib/unimi/fastutil*.jar + -- + -Y ${build.dir}/lib/${version}.jar + -Y ${bigdata.dir}/bigdata/lib/dsi-util*.jar + -Y ${bigdata.dir}/bigdata/lib/lgpl-utils*.jar + " /> + </java> + </target> <!-- @see <a href="https://sourceforge.net/apps/trac/bigdata/ticket/628" > Create a bigdata-client jar for the NSS REST API </a> @@ -516,7 +503,7 @@ <java jar="src/build/autojar/autojar.jar" fork="true" failonerror="true"> <arg line="-o ${build.dir}/${client-version}.jar -c ${build.dir}/${version}.jar - -v + -v -- com.bigdata.rdf.sail.webapp.client.*.class com.bigdata.rdf.properties.*.class @@ -526,88 +513,87 @@ </java> </target> - <!--depends="bundleJar"--> - <target name="war" depends="bundleJar, autojar-strip-fastutil" - description="Generates a WAR artifact."> + <!--depends="bundleJar"--> + <target name="war" depends="bundleJar, autojar-strip-fastutil" + description="Generates a WAR artifact."> <property name="war.dir" value="${build.dir}/staged-war" - description="The staging directory for the webapp."/> + description="The staging directory for the webapp."/> <property name="war.file" value="${build.dir}/bigdata.war" - description="The generated WAR file." /> + description="The generated WAR file." /> <delete file="${war.file}" - description="Remove the old WAR file."/> + description="Remove the old WAR file."/> <delete dir="${war.dir}" - description="Remove old WAR staging directory."/> - <echo message="Staging webapp to ${war.dir}"/> - <copy todir="${war.dir}" - includeemptydirs="yes" - preservelastmodified="true" - description="Stage the webapp to a temporary directory." - > - <fileset dir="bigdata-war/src"> - <include name="**/*"/> - <!-- The jetty.xml file is not used when deploying to a servlet container. --> - <exclude name="**/jetty.xml"/> + description="Remove old WAR staging directory."/> + <echo message="Staging webapp to ${war.dir}"/> + <copy todir="${war.dir}" + includeemptydirs="yes" + preservelastmodified="true" + description="Stage the webapp to a temporary directory." + > + <fileset dir="bigdata-war/src"> + <include name="**/*"/> + <!-- The jetty.xml file is not used when deploying to a servlet container. 
--> + <exclude name="**/jetty.xml"/> </fileset> - </copy> + </copy> <replace file="${war.dir}/WEB-INF/web.xml" - summary="true" - description="Override the default location of the RWStore.properties file."> + summary="true" + description="Override the default location of the RWStore.properties file."> <replacefilter token="WEB-INF/RWStore.properties" - value="../webapps/bigdata/WEB-INF/RWStore.properties" + value="../webapps/bigdata/WEB-INF/RWStore.properties" /> </replace> <echo message="Building webapp in ${war.file} from ${war.dir}"/> - <war destfile="${war.file}" - webxml="${war.dir}/WEB-INF/web.xml" - > - <!-- The staged WAR files. --> - <fileset dir="${war.dir}"/> + <war destfile="${war.file}" + webxml="${war.dir}/WEB-INF/web.xml" + > + <!-- The staged WAR files. --> + <fileset dir="${war.dir}"/> <!-- Copy the bigdata license. --> - <file file="${bigdata.dir}/LICENSE.txt"/> - <!-- Copy the top-level NOTICE file. --> - <file file="${bigdata.dir}/NOTICE"/> + <file file="${bigdata.dir}/LICENSE.txt"/> + <!-- Copy the top-level NOTICE file. --> + <file file="${bigdata.dir}/NOTICE"/> <!-- Copy all of the LEGAL directories. --> - <fileset dir="${bigdata.dir}/bigdata" includes="LEGAL/*"/> - <fileset dir="${bigdata.dir}/bigdata-rdf" includes="LEGAL/*"/> - <fileset dir="${bigdata.dir}/bigdata-sails" includes="LEGAL/*"/> - <fileset dir="${bigdata.dir}/bigdata-blueprints" includes="LEGAL/*"/> + <fileset dir="${bigdata.dir}/bigdata" includes="LEGAL/*"/> + <fileset dir="${bigdata.dir}/bigdata-rdf" includes="LEGAL/*"/> + <fileset dir="${bigdata.dir}/bigdata-sails" includes="LEGAL/*"/> <fileset dir="${bigdata.dir}/bigdata-gom" includes="LEGAL/*"/> - <fileset dir="${bigdata.dir}/bigdata-jini" includes="LEGAL/*"/> - <!-- bigdata jar plus some dependencies as filtered by autojar. - <lib file="${build.dir}/bigdataPlus.jar"/> --> - <!-- The stripped version of fastutil. --> - <lib file="${build.dir}/fastutil-stripped.jar"/> - <lib dir="${build.dir}/lib"> - <exclude name="fastutil*.jar"/> - <!-- jars bundled into "bigdata-plus" by autojar. - <exclude name="colt*.jar"/> - <exclude name="dsi-util*.jar"/> - <exclude name="lgpl-utils*.jar"/> - <exclude name="bigdata*.jar"/>--> - <!-- jars which are not currently used. --> - <exclude name="2p-*.jar"/> - <!-- test suite stuff is not needed. --> - <exclude name="junit*.jar"/> - <exclude name="sesame*testsuite*.jar"/> - <!-- osgi stuff is not needed. --> - <exclude name="bnd*.jar"/> - <!-- jetty / servlet / jsp jars not required for the WAR. --> - <exclude name="jetty*.jar"/> + <fileset dir="${bigdata.dir}/bigdata-jini" includes="LEGAL/*"/> + <!-- bigdata jar plus some dependencies as filtered by autojar. + <lib file="${build.dir}/bigdataPlus.jar"/> --> + <!-- The stripped version of fastutil. --> + <lib file="${build.dir}/fastutil-stripped.jar"/> + <lib dir="${build.dir}/lib"> + <exclude name="fastutil*.jar"/> + <!-- jars bundled into "bigdata-plus" by autojar. + <exclude name="colt*.jar"/> + <exclude name="dsi-util*.jar"/> + <exclude name="lgpl-utils*.jar"/> + <exclude name="bigdata*.jar"/>--> + <!-- jars which are not currently used. --> + <exclude name="2p-*.jar"/> + <!-- test suite stuff is not needed. --> + <exclude name="junit*.jar"/> + <exclude name="sesame*testsuite*.jar"/> + <!-- osgi stuff is not needed. --> + <exclude name="bnd*.jar"/> + <!-- jetty / servlet / jsp jars not required for the WAR. --> + <exclude name="jetty*.jar"/> <exclude name="servlet-api*.jar"/> - <!-- zookeeper only used in scale-out. 
--> - <exclude name="apache/zookeeper*.jar"/> - <!-- jini only used in scale-out. --> - <exclude name="jini/**/*.jar"/> - </lib> - <classes file="${war.dir}/WEB-INF/classes/log4j.properties"/> - </war> - </target> - + <!-- zookeeper only used in scale-out. --> + <exclude name="apache/zookeeper*.jar"/> + <!-- jini only used in scale-out. --> + <exclude name="jini/**/*.jar"/> + </lib> + <classes file="${war.dir}/WEB-INF/classes/log4j.properties"/> + </war> + </target> + <target name="banner" depends="jar" description="Displays the banner (verifies runtime classpath)."> <java classname="com.bigdata.Banner" failonerror="true" fork="false" logerror="true"> - <classpath refid="runtime.classpath" /> + <classpath refid="runtime.classpath" /> </java> </target> @@ -650,10 +636,10 @@ <mkdir dir="${LAS}" /> <!-- NAS/LAS directories must be read/write for the group. --> <chmod perm="ug+rw,o-rw"> - <fileset dir="${NAS}" /> + <fileset dir="${NAS}" /> </chmod> <chmod perm="ug+rw,o-rw"> - <fileset dir="${LAS}" /> + <fileset dir="${LAS}" /> </chmod> <!-- create subdirectories of NAS - should inherit permissions. --> <mkdir dir="${install.config.dir}" /> @@ -664,118 +650,118 @@ <mkdir dir="${install.dist.dir}" /> <!-- install configuration files. --> <copy toDir="${install.config.dir}"> - <fileset dir="${bigdata.dir}/src/resources/config"> - </fileset> + <fileset dir="${bigdata.dir}/src/resources/config"> + </fileset> </copy> <!-- install documentation. --> <copy toDir="${install.doc.dir}"> - <!-- javadoc. --> - <fileset dir="${build.dir}/docs" /> - <!-- common files from the root of the archive. --> - <!-- @todo cleanup LEGAL into one directory off the root in the src tree? --> - <fileset dir="${bigdata.dir}"> - <include name="LICENSE.txt" /> - <include name="overview.html" /> - <include name="README-JINI" /> - <include name="bigdata/LEGAL/*" /> - <include name="bigdata-jini/LEGAL/*" /> + <!-- javadoc. --> + <fileset dir="${build.dir}/docs" /> + <!-- common files from the root of the archive. --> + <!-- @todo cleanup LEGAL into one directory off the root in the src tree? --> + <fileset dir="${bigdata.dir}"> + <include name="LICENSE.txt" /> + <include name="overview.html" /> + <include name="README-JINI" /> + <include name="bigdata/LEGAL/*" /> + <include name="bigdata-jini/LEGAL/*" /> <include name="bigdata-rdf/LEGAL/*" /> - <include name="bigdata-sails/LEGAL/*" /> + <include name="bigdata-sails/LEGAL/*" /> <include name="bigdata-gom/LEGAL/*" /> - </fileset> + </fileset> </copy> <!-- install JARs. --> <copy toDir="${install.lib.dir}"> - <fileset dir="${build.dir}/lib" /> - <fileset file="${build.dir}/${version}.jar" /> + <fileset dir="${build.dir}/lib" /> + <fileset file="${build.dir}/${version}.jar" /> </copy> <!-- install scripts. --> <copy toDir="${install.bin.dir}"> - <fileset dir="src/resources/scripts"> - </fileset> + <fileset dir="src/resources/scripts"> + </fileset> </copy> <!-- parameter substitution. 
--> <property name="myclasspath" refid="install.classpath" /> <replace dir="${install.bin.dir}" summary="true"> - <replacefilter token="@FED@" value="${FED}" /> - <replacefilter token="@NAS@" value="${NAS}" /> - <replacefilter token="@LAS@" value="${LAS}" /> - <replacefilter token="@JAVA_HOME@" value="${JAVA_HOME}" /> - <replacefilter token="@JINI_CLASS_SERVER_PORT@" value="${JINI_CLASS_SERVER_PORT}" /> - <replacefilter token="@LOAD_BALANCER_PORT@" value="${LOAD_BALANCER_PORT}" /> - <replacefilter token="@REPORT_ALL@" value="${REPORT_ALL}" /> - <replacefilter token="@SYSSTAT_HOME@" value="${SYSSTAT_HOME}" /> - <replacefilter token="@USE_NIO@" value="${USE_NIO}" /> - <replacefilter token="@BIN_DIR@" value="${install.bin.dir}" /> - <replacefilter token="@LIB_DIR@" value="${install.lib.dir}" /> - <replacefilter token="@LOG_DIR@" value="${install.log.dir}" /> - <replacefilter token="@CONFIG_DIR@" value="${install.config.dir}" /> - <replacefilter token="@INSTALL_USER@" value="${install.user}" /> - <replacefilter token="@INSTALL_GROUP@" value="${install.group}" /> - <replacefilter token="@LOCK_CMD@" value="${LOCK_CMD}" /> - <replacefilter token="@LOCK_FILE@" value="${LOCK_FILE}" /> - <replacefilter token="@BIGDATA_CONFIG@" value="${bigdata.config}" /> - <replacefilter token="@JINI_CONFIG@" value="${jini.config}" /> - <replacefilter token="@POLICY_FILE@" value="${policyFile}" /> - <replacefilter token="@LOG4J_SOCKET_LOGGER_HOST@" value="${LOG4J_SOCKET_LOGGER_HOST}" /> - <replacefilter token="@LOG4J_SOCKET_LOGGER_PORT@" value="${LOG4J_SOCKET_LOGGER_PORT}" /> - <replacefilter token="@LOG4J_SOCKET_LOGGER_CONFIG@" value="${log4jServer.config}" /> - <replacefilter token="@LOG4J_DATE_PATTERN@" value="${LOG4J_DATE_PATTERN}" /> - <replacefilter token="@LOG4J_CONFIG@" value="${log4j.config}" /> - <replacefilter token="@LOGGING_CONFIG@" value="${logging.config}" /> - <replacefilter token="@ERROR_LOG@" value="${errorLog}" /> - <replacefilter token="@DETAIL_LOG@" value="${detailLog}" /> - <replacefilter token="@EVENT_LOG@" value="${eventLog}" /> - <replacefilter token="@RULE_LOG@" value="${ruleLog}" /> - <replacefilter token="@STATE_LOG@" value="${stateLog}" /> - <replacefilter token="@STATE_FILE@" value="${stateFile}" /> - <replacefilter token="@FORCE_KILL_ALL@" value="${forceKillAll}" /> - <replacefilter token="@NTP_MASTER@" value="${NTP_MASTER}" /> - <replacefilter token="@NTP_NETWORK@" value="${NTP_NETWORK}" /> - <replacefilter token="@NTP_NETMASK@" value="${NTP_NETMASK}" /> - <replacefilter token="@CLASSPATH@" value="${myclasspath}" /> + <replacefilter token="@FED@" value="${FED}" /> + <replacefilter token="@NAS@" value="${NAS}" /> + <replacefilter token="@LAS@" value="${LAS}" /> + <replacefilter token="@JAVA_HOME@" value="${JAVA_HOME}" /> + <replacefilter token="@JINI_CLASS_SERVER_PORT@" value="${JINI_CLASS_SERVER_PORT}" /> + <replacefilter token="@LOAD_BALANCER_PORT@" value="${LOAD_BALANCER_PORT}" /> + <replacefilter token="@REPORT_ALL@" value="${REPORT_ALL}" /> + <replacefilter token="@SYSSTAT_HOME@" value="${SYSSTAT_HOME}" /> + <replacefilter token="@USE_NIO@" value="${USE_NIO}" /> + <replacefilter token="@BIN_DIR@" value="${install.bin.dir}" /> + <replacefilter token="@LIB_DIR@" value="${install.lib.dir}" /> + <replacefilter token="@LOG_DIR@" value="${install.log.dir}" /> + <replacefilter token="@CONFIG_DIR@" value="${install.config.dir}" /> + <replacefilter token="@INSTALL_USER@" value="${install.user}" /> + <replacefilter token="@INSTALL_GROUP@" value="${install.group}" /> + <replacefilter 
token="@LOCK_CMD@" value="${LOCK_CMD}" /> + <replacefilter token="@LOCK_FILE@" value="${LOCK_FILE}" /> + <replacefilter token="@BIGDATA_CONFIG@" value="${bigdata.config}" /> + <replacefilter token="@JINI_CONFIG@" value="${jini.config}" /> + <replacefilter token="@POLICY_FILE@" value="${policyFile}" /> + <replacefilter token="@LOG4J_SOCKET_LOGGER_HOST@" value="${LOG4J_SOCKET_LOGGER_HOST}" /> + <replacefilter token="@LOG4J_SOCKET_LOGGER_PORT@" value="${LOG4J_SOCKET_LOGGER_PORT}" /> + <replacefilter token="@LOG4J_SOCKET_LOGGER_CONFIG@" value="${log4jServer.config}... [truncated message content] |
From: <jer...@us...> - 2014-05-08 03:13:01
Revision: 8230 http://sourceforge.net/p/bigdata/code/8230 Author: jeremy_carroll Date: 2014-05-08 03:12:55 +0000 (Thu, 08 May 2014) Log Message: ----------- externalized Japanese, Russian and German strings to address encoding issues Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/NonEnglishExamples.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/examples.properties Removed Paths: ------------- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs Deleted: branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs 2014-05-08 02:57:15 UTC (rev 8229) +++ branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs 2014-05-08 03:12:55 UTC (rev 8230) @@ -1,2 +0,0 @@ -eclipse.preferences.version=1 -encoding//bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java=UTF-8 Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java 2014-05-08 02:57:15 UTC (rev 8229) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java 2014-05-08 03:12:55 UTC (rev 8230) @@ -57,7 +57,7 @@ String text, String spaceSeparated) throws IOException { compareTokenStream(getAnalyzer(lang, stopWordsSignificant), text, - spaceSeparated.split(" ")); + spaceSeparated.split(" ")); //$NON-NLS-1$ } private void compareTokenStream(Analyzer a, String text, String expected[]) throws IOException { TokenStream s = a.tokenStream(null, new StringReader(text)); @@ -73,20 +73,20 @@ public void testEnglishFilterStopWords() throws IOException { - for (String lang: new String[]{ "eng", null, "" }) { + for (String lang: new String[]{ "eng", null, "" }) { //$NON-NLS-1$ //$NON-NLS-2$ comparisonTest(lang, true, - "The test to end all tests! Forever.", - "test end all tests forever" + "The test to end all tests! Forever.", //$NON-NLS-1$ + "test end all tests forever" //$NON-NLS-1$ ); } } public void testEnglishNoFilter() throws IOException { - for (String lang: new String[]{ "eng", null, "" }) { + for (String lang: new String[]{ "eng", null, "" }) { //$NON-NLS-1$ //$NON-NLS-2$ comparisonTest(lang, false, - "The test to end all tests! Forever.", - "the test to end all tests forever" + "The test to end all tests! Forever.", //$NON-NLS-1$ + "the test to end all tests forever" //$NON-NLS-1$ ); } } @@ -95,11 +95,11 @@ // 'de' is more standard, but the DefaultAnalyzerFactory does not // implement 'de' correctly. 
public void testGermanFilterStopWords() throws IOException { - comparisonTest("ger", + comparisonTest("ger", //$NON-NLS-1$ true, - "Hanoi - Im Streit um die Vorherrschaft im Südchinesischen Meer ist es zu einer " + - "erneuten Auseinandersetzung gekommen:", - "hanoi strei um vorherrschaf sudchinesisch meer zu erneu auseinandersetzung gekomm" + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.10") + //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.11"), //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.12") //$NON-NLS-1$ ); } @@ -108,56 +108,54 @@ // 'ru' is more standard, but the DefaultAnalyzerFactory does not // implement 'ru' correctly. public void testRussianFilterStopWords() throws IOException { - comparisonTest("rus", + comparisonTest("rus", //$NON-NLS-1$ true, // I hope this is not offensive text. - "Они ответственны полностью и за ту, и за другую трагедию. " + - "Мы уже получили данные от сочувствующих нам офицеров СБУ.", - "ответствен полност ту друг трагед получ дан сочувств нам офицер сбу" + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.14") + //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.15"), //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.16") //$NON-NLS-1$ ); } public void testGermanNoStopWords() throws IOException { - comparisonTest("ger", + comparisonTest("ger", //$NON-NLS-1$ false, - "Hanoi - Im Streit um die Vorherrschaft im Südchinesischen Meer ist es zu einer " + - "erneuten Auseinandersetzung gekommen:", - "hanoi im strei um die vorherrschaf im sudchinesisch meer ist es zu ein erneu auseinandersetzung gekomm" + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.18") + //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.19"), //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.20") //$NON-NLS-1$ ); } public void testRussianNoStopWords() throws IOException { - comparisonTest("rus", + comparisonTest("rus", //$NON-NLS-1$ false, - // I hope this is not offensive text. - "Они ответственны полностью и за ту, и за другую трагедию. " + - "Мы уже получили данные от сочувствующих нам офицеров СБУ.", - "он ответствен полност и за ту и за друг трагед мы уж получ дан от сочувств нам офицер сбу" + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.22") + //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.23"), //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.24") //$NON-NLS-1$ ); } public void testJapanese() throws IOException { for (boolean filterStopWords: new Boolean[]{true, false}) { - comparisonTest("jpn", + comparisonTest("jpn", //$NON-NLS-1$ filterStopWords, - // I hope this is not offensive text. 
- "高林純示 生態学研究センター教授らの研究グループと松井健二 山口大学医学系研究科(農学系)教授らの研究グループは、", - "高林 林純 純示 生態 態学 学研 研究 究セ セン ンタ ター ー教 教授 授ら らの の研 研究 究グ グル ルー " + - "ープ プと と松 松井 井健 健二 山口 口大 大学 学医 医学 学系 系研 " + - "研究 究科 農学 学系 教授 授ら らの の研 研究 究グ グル ルー ープ プは"); + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.26"), //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.27") + //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.28") + //$NON-NLS-1$ + NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.29")); //$NON-NLS-1$ } } public void testConfiguredLanguages() { - checkConfig("BrazilianAnalyzer", "por", "pt"); - checkConfig("ChineseAnalyzer", "zho", "chi", "zh"); - checkConfig("CJKAnalyzer", "jpn", "ja", "kor", "ko"); - checkConfig("CzechAnalyzer", "ces", "cze", "cs"); - checkConfig("DutchAnalyzer", "dut", "nld", "nl"); - checkConfig("GermanAnalyzer", "deu", "ger", "de"); - checkConfig("GreekAnalyzer", "gre", "ell", "el"); - checkConfig("RussianAnalyzer", "rus", "ru"); - checkConfig("ThaiAnalyzer", "th", "tha"); - checkConfig("StandardAnalyzer", "en", "eng", "", null); + checkConfig("BrazilianAnalyzer", "por", "pt"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ + checkConfig("ChineseAnalyzer", "zho", "chi", "zh"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ + checkConfig("CJKAnalyzer", "jpn", "ja", "kor", "ko"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$ + checkConfig("CzechAnalyzer", "ces", "cze", "cs"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ + checkConfig("DutchAnalyzer", "dut", "nld", "nl"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ + checkConfig("GermanAnalyzer", "deu", "ger", "de"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ + checkConfig("GreekAnalyzer", "gre", "ell", "el"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ + checkConfig("RussianAnalyzer", "rus", "ru"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ + checkConfig("ThaiAnalyzer", "th", "tha"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ + checkConfig("StandardAnalyzer", "en", "eng", "", null); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ } private void checkConfig(String classname, String ...langs) { @@ -166,7 +164,7 @@ // if (lang != null && lang.length()==3) { assertEquals(classname, getAnalyzer(lang,true).getClass().getSimpleName()); - assertEquals(classname, getAnalyzer(lang+"-x-foobar",true).getClass().getSimpleName()); + assertEquals(classname, getAnalyzer(lang+NonEnglishExamples.getString("AbstractAnalyzerFactoryTest.0"),true).getClass().getSimpleName()); //$NON-NLS-1$ } } Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/NonEnglishExamples.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/NonEnglishExamples.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/NonEnglishExamples.java 2014-05-08 03:12:55 UTC (rev 8230) @@ -0,0 +1,21 @@ +package com.bigdata.search; + +import java.util.MissingResourceException; +import java.util.ResourceBundle; + +public class NonEnglishExamples { + private static final String BUNDLE_NAME = "com.bigdata.search.examples"; //$NON-NLS-1$ + + private static final ResourceBundle RESOURCE_BUNDLE = ResourceBundle.getBundle(BUNDLE_NAME); + + private NonEnglishExamples() { + } + + public static String getString(String key) { + try { + return RESOURCE_BUNDLE.getString(key); + } catch (MissingResourceException e) { + return '!' 
+ key + '!'; + } + } +} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/examples.properties =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/examples.properties (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/examples.properties 2014-05-08 03:12:55 UTC (rev 8230) @@ -0,0 +1,17 @@ +AbstractAnalyzerFactoryTest.0=-x-foobar +AbstractAnalyzerFactoryTest.10=Hanoi - Im Streit um die Vorherrschaft im S\xFCdchinesischen Meer ist es zu einer +AbstractAnalyzerFactoryTest.11=erneuten Auseinandersetzung gekommen: +AbstractAnalyzerFactoryTest.12=hanoi strei um vorherrschaf sudchinesisch meer zu erneu auseinandersetzung gekomm +AbstractAnalyzerFactoryTest.14=\u041E\u043D\u0438 \u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0435\u043D\u043D\u044B \u043F\u043E\u043B\u043D\u043E\u0441\u0442\u044C\u044E \u0438 \u0437\u0430 \u0442\u0443, \u0438 \u0437\u0430 \u0434\u0440\u0443\u0433\u0443\u044E \u0442\u0440\u0430\u0433\u0435\u0434\u0438\u044E. +AbstractAnalyzerFactoryTest.15=\u041C\u044B \u0443\u0436\u0435 \u043F\u043E\u043B\u0443\u0447\u0438\u043B\u0438 \u0434\u0430\u043D\u043D\u044B\u0435 \u043E\u0442 \u0441\u043E\u0447\u0443\u0432\u0441\u0442\u0432\u0443\u044E\u0449\u0438\u0445 \u043D\u0430\u043C \u043E\u0444\u0438\u0446\u0435\u0440\u043E\u0432 \u0421\u0411\u0423. +AbstractAnalyzerFactoryTest.16=\u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0435\u043D \u043F\u043E\u043B\u043D\u043E\u0441\u0442 \u0442\u0443 \u0434\u0440\u0443\u0433 \u0442\u0440\u0430\u0433\u0435\u0434 \u043F\u043E\u043B\u0443\u0447 \u0434\u0430\u043D \u0441\u043E\u0447\u0443\u0432\u0441\u0442\u0432 \u043D\u0430\u043C \u043E\u0444\u0438\u0446\u0435\u0440 \u0441\u0431\u0443 +AbstractAnalyzerFactoryTest.18=Hanoi - Im Streit um die Vorherrschaft im S\xFCdchinesischen Meer ist es zu einer +AbstractAnalyzerFactoryTest.19=erneuten Auseinandersetzung gekommen: +AbstractAnalyzerFactoryTest.20=hanoi im strei um die vorherrschaf im sudchinesisch meer ist es zu ein erneu auseinandersetzung gekomm +AbstractAnalyzerFactoryTest.22=\u041E\u043D\u0438 \u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0435\u043D\u043D\u044B \u043F\u043E\u043B\u043D\u043E\u0441\u0442\u044C\u044E \u0438 \u0437\u0430 \u0442\u0443, \u0438 \u0437\u0430 \u0434\u0440\u0443\u0433\u0443\u044E \u0442\u0440\u0430\u0433\u0435\u0434\u0438\u044E. +AbstractAnalyzerFactoryTest.23=\u041C\u044B \u0443\u0436\u0435 \u043F\u043E\u043B\u0443\u0447\u0438\u043B\u0438 \u0434\u0430\u043D\u043D\u044B\u0435 \u043E\u0442 \u0441\u043E\u0447\u0443\u0432\u0441\u0442\u0432\u0443\u044E\u0449\u0438\u0445 \u043D\u0430\u043C \u043E\u0444\u0438\u0446\u0435\u0440\u043E\u0432 \u0421\u0411\u0423. 
+AbstractAnalyzerFactoryTest.24=\u043E\u043D \u043E\u0442\u0432\u0435\u0442\u0441\u0442\u0432\u0435\u043D \u043F\u043E\u043B\u043D\u043E\u0441\u0442 \u0438 \u0437\u0430 \u0442\u0443 \u0438 \u0437\u0430 \u0434\u0440\u0443\u0433 \u0442\u0440\u0430\u0433\u0435\u0434 \u043C\u044B \u0443\u0436 \u043F\u043E\u043B\u0443\u0447 \u0434\u0430\u043D \u043E\u0442 \u0441\u043E\u0447\u0443\u0432\u0441\u0442\u0432 \u043D\u0430\u043C \u043E\u0444\u0438\u0446\u0435\u0440 \u0441\u0431\u0443 +AbstractAnalyzerFactoryTest.26=\u9AD8\u6797\u7D14\u793A \u751F\u614B\u5B66\u7814\u7A76\u30BB\u30F3\u30BF\u30FC\u6559\u6388\u3089\u306E\u7814\u7A76\u30B0\u30EB\u30FC\u30D7\u3068\u677E\u4E95\u5065\u4E8C \u5C71\u53E3\u5927\u5B66\u533B\u5B66\u7CFB\u7814\u7A76\u79D1\uFF08\u8FB2\u5B66\u7CFB\uFF09\u6559\u6388\u3089\u306E\u7814\u7A76\u30B0\u30EB\u30FC\u30D7\u306F\u3001 +AbstractAnalyzerFactoryTest.27=\u9AD8\u6797 \u6797\u7D14 \u7D14\u793A \u751F\u614B \u614B\u5B66 \u5B66\u7814 \u7814\u7A76 \u7A76\u30BB \u30BB\u30F3 \u30F3\u30BF \u30BF\u30FC \u30FC\u6559 \u6559\u6388 \u6388\u3089 \u3089\u306E \u306E\u7814 \u7814\u7A76 \u7A76\u30B0 \u30B0\u30EB \u30EB\u30FC +AbstractAnalyzerFactoryTest.28=\u30FC\u30D7 \u30D7\u3068 \u3068\u677E \u677E\u4E95 \u4E95\u5065 \u5065\u4E8C \u5C71\u53E3 \u53E3\u5927 \u5927\u5B66 \u5B66\u533B \u533B\u5B66 \u5B66\u7CFB \u7CFB\u7814 +AbstractAnalyzerFactoryTest.29=\u7814\u7A76 \u7A76\u79D1 \u8FB2\u5B66 \u5B66\u7CFB \u6559\u6388 \u6388\u3089 \u3089\u306E \u306E\u7814 \u7814\u7A76 \u7A76\u30B0 \u30B0\u30EB \u30EB\u30FC \u30FC\u30D7 \u30D7\u306F This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
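The change above replaces the non-ASCII literals in the test source with lookups against a ResourceBundle backed by a .properties file in which the German, Russian and Japanese text is stored as \uXXXX escapes; the Java source then stays pure ASCII, so the compiler's -encoding setting can no longer corrupt the test data. A minimal sketch of the same pattern, assuming a messages.properties on the classpath (the bundle name is a placeholder; the commit uses com.bigdata.search.examples):

import java.util.MissingResourceException;
import java.util.ResourceBundle;

/** Minimal sketch of the externalized-strings pattern; needs a messages.properties on the classpath. */
public final class ExternalizedStringsDemo {

    // Placeholder bundle name; the commit above uses "com.bigdata.search.examples".
    private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("messages");

    private ExternalizedStringsDemo() {
    }

    /** Look up a key, falling back to a visible marker when it is missing. */
    static String getString(final String key) {
        try {
            return BUNDLE.getString(key);
        } catch (MissingResourceException e) {
            return '!' + key + '!'; // same fallback as NonEnglishExamples above
        }
    }

    public static void main(final String[] args) {
        // messages.properties carries the text as escapes, e.g.
        //   greeting.ru=\u041F\u0440\u0438\u0432\u0435\u0442
        // so this source file never contains a non-ASCII character.
        System.out.println(getString("greeting.ru"));
    }
}

Properties files are read as ISO-8859-1 with \uXXXX escapes (on the Java versions current at the time), so the escaped form is what makes the round trip encoding-proof.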
From: <mrp...@us...> - 2014-05-08 02:57:18
Revision: 8229 http://sourceforge.net/p/bigdata/code/8229 Author: mrpersonick Date: 2014-05-08 02:57:15 +0000 (Thu, 08 May 2014) Log Message: ----------- changed the javadoc Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java 2014-05-08 02:48:41 UTC (rev 8228) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java 2014-05-08 02:57:15 UTC (rev 8229) @@ -28,6 +28,8 @@ import java.util.List; import java.util.Properties; +import org.openrdf.repository.Repository; + import com.bigdata.journal.BufferMode; import com.bigdata.journal.Journal; import com.bigdata.rdf.axioms.NoAxioms; @@ -67,7 +69,7 @@ RDR, /** - * Text index on or off. On by default. + * Text index on or off. Off by default. */ TextIndex, This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jer...@us...> - 2014-05-08 02:48:47
Revision: 8228 http://sourceforge.net/p/bigdata/code/8228 Author: jeremy_carroll Date: 2014-05-08 02:48:41 +0000 (Thu, 08 May 2014) Log Message: ----------- Tests for Language Range Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestLanguageRange.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-08 01:52:09 UTC (rev 8227) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-08 02:48:41 UTC (rev 8228) @@ -126,7 +126,7 @@ public class ConfigurableAnalyzerFactory implements IAnalyzerFactory { final private static transient Logger log = Logger.getLogger(ConfigurableAnalyzerFactory.class); - private static class LanguageRange implements Comparable<LanguageRange> { + static class LanguageRange implements Comparable<LanguageRange> { private final String range[]; private final String full; @@ -173,6 +173,10 @@ public int hashCode() { return full.hashCode(); } + + public boolean extendedFilterMatch(String langTag) { + return extendedFilterMatch(langTag.toLowerCase(Locale.ROOT).split("-")); + } // See RFC 4647, 3.3.2 public boolean extendedFilterMatch(String[] language) { Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java 2014-05-08 01:52:09 UTC (rev 8227) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java 2014-05-08 02:48:41 UTC (rev 8228) @@ -72,6 +72,8 @@ // search backed by EDS. suite.addTest(proxySuite(new TestEDS("EDS Search"),"EDS")); + + suite.addTestSuite(TestLanguageRange.class); return suite; Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestLanguageRange.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestLanguageRange.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestLanguageRange.java 2014-05-08 02:48:41 UTC (rev 8228) @@ -0,0 +1,70 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 7, 2014 + */ +package com.bigdata.search; + +import com.bigdata.search.ConfigurableAnalyzerFactory.LanguageRange; + +import junit.framework.TestCase2; + +public class TestLanguageRange extends TestCase2 { + + public TestLanguageRange() { + } + + + public TestLanguageRange(String name) { + super(name); + } + + private void match(String range, String lang) { + LanguageRange lr = new LanguageRange(range.toLowerCase()); + assertTrue(lr.extendedFilterMatch(lang)); + } + + private void nomatch(String range, String lang) { + LanguageRange lr = new LanguageRange(range.toLowerCase()); + assertFalse(lr.extendedFilterMatch(lang)); + } + + + public void testRFC4647() { + for (String range: new String[]{"de-DE", "de-*-DE"}) { + match(range, "de-DE"); + match(range, "de-Latn-DE"); + match(range, "de-Latf-DE"); + match(range, "de-DE-x-goethe"); + match(range, "de-Latn-DE-1996"); + match(range, "de-Deva-DE-1996"); + nomatch(range, "de"); + nomatch(range, "de-x-DE"); + nomatch(range, "de-Deva"); + } + + } + + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
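For reference, the rule that extendedFilterMatch is being tested against is the extended filtering algorithm of RFC 4647, section 3.3.2. The following is an independent sketch of that algorithm, not the bigdata implementation; the cases in main() mirror TestLanguageRange above:

import java.util.Locale;

/** Standalone sketch of RFC 4647, section 3.3.2 ("extended filtering"); not the bigdata code. */
public final class ExtendedFilterSketch {

    /** @return true iff the language tag matches the extended language range. */
    static boolean matches(final String range, final String tag) {
        final String[] r = range.toLowerCase(Locale.ROOT).split("-");
        final String[] t = tag.toLowerCase(Locale.ROOT).split("-");
        // Step 2: the first subtags must agree unless the range starts with '*'.
        if (!r[0].equals("*") && !r[0].equals(t[0])) {
            return false;
        }
        int i = 1, j = 1;
        // Step 3: walk the remaining subtags of the range.
        while (i < r.length) {
            if (r[i].equals("*")) {
                i++;                  // 3A: a wildcard matches zero or more subtags.
            } else if (j >= t.length) {
                return false;         // 3B: the tag ran out before the range did.
            } else if (r[i].equals(t[j])) {
                i++; j++;             // 3C: subtags agree, advance both lists.
            } else if (t[j].length() == 1) {
                return false;         // 3D: never skip past a singleton such as "x".
            } else {
                j++;                  // 3E: skip a non-matching, non-singleton subtag.
            }
        }
        return true;                  // Step 4: range exhausted, the match succeeds.
    }

    public static void main(final String[] args) {
        // The same cases exercised by TestLanguageRange above.
        System.out.println(matches("de-DE", "de-Latn-DE"));       // true
        System.out.println(matches("de-*-DE", "de-DE-x-goethe")); // true
        System.out.println(matches("de-DE", "de"));               // false
        System.out.println(matches("de-*-DE", "de-x-DE"));        // false
    }
}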
From: <mrp...@us...> - 2014-05-08 01:52:14
Revision: 8227 http://sourceforge.net/p/bigdata/code/8227 Author: mrpersonick Date: 2014-05-08 01:52:09 +0000 (Thu, 08 May 2014) Log Message: ----------- fixed the gremlin installer, added a loadGraphML method to all BigdataGraph impls Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/build.xml Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-08 01:49:33 UTC (rev 8226) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-08 01:52:09 UTC (rev 8227) @@ -54,6 +54,7 @@ import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.Vertex; import com.tinkerpop.blueprints.util.DefaultGraphQuery; +import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; /** * A base class for a Blueprints wrapper around a bigdata back-end. @@ -93,6 +94,13 @@ return getClass().getSimpleName().toLowerCase(); } + /** + * Post a GraphML file to the remote server. (Bulk-upload operation.) + */ + public void loadGraphML(final String file) throws Exception { + GraphMLReader.inputGraph(this, file); + } + protected abstract RepositoryConnection cxn() throws Exception; // public BigdataSailRepositoryConnection getConnection() { Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java 2014-05-08 01:49:33 UTC (rev 8226) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java 2014-05-08 01:52:09 UTC (rev 8227) @@ -80,8 +80,9 @@ /** * Post a GraphML file to the remote server. (Bulk-upload operation.) 
*/ - public long postGraphML(final String file) throws Exception { - return this.repo.getRemoteRepository().postGraphML(file); + @Override + public void loadGraphML(final String file) throws Exception { + this.repo.getRemoteRepository().postGraphML(file); } /** Modified: branches/BIGDATA_RELEASE_1_3_0/build.xml =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/build.xml 2014-05-08 01:49:33 UTC (rev 8226) +++ branches/BIGDATA_RELEASE_1_3_0/build.xml 2014-05-08 01:52:09 UTC (rev 8227) @@ -2530,7 +2530,7 @@ <delete file="${build.dir}/gremlin-groovy-2.5.0.zip"/> </target> - <target name="install-gremlin" depends="prepare,compile,jar"> + <target name="install-gremlin" depends="prepare,compile,jar,bundle"> <delete> <fileset dir="${build.dir}/gremlin-groovy-2.5.0/lib"> <include name="blueprints-graph-sail-2.5.0.jar"/> @@ -2577,12 +2577,17 @@ </fileset> </delete> <copy toDir="${build.dir}/gremlin-groovy-2.5.0/lib" flatten="true"> + <!-- <fileset dir="${bigdata.dir}/bigdata-rdf/lib"> <include name="openrdf-sesame-${sesame.version}-onejar.jar" /> </fileset> <fileset dir="${bigdata.dir}/bigdata-sails/lib/httpcomponents"> <include name="httpmime-${apache.httpmime.version}.jar" /> </fileset> + --> + <fileset dir="${build.dir}/lib"> + <include name="*.jar" /> + </fileset> <fileset dir="${build.dir}"> <include name="${version}.jar" /> </fileset> @@ -2594,9 +2599,10 @@ 1. Start the gremlin console: > ./${build.dir}/gremlin-groovy-2.5.0/bin/gremlin.sh 2. Connect to the bigdata server: - > g = com.bigdata.blueprints.BigdataGraphFactory.connect("http://localhost:9999/bigdata") + gremlin> import com.bigdata.blueprints.* + gremlin> g = BigdataGraphFactory.connect("http://localhost:9999") 3. Don't forget to shut down the connection when you're done: - > g.shutdown() + gremlin> g.shutdown() </echo> </target> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
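The net effect of this change is that a GraphML bulk load can be driven the same way against any BigdataGraph implementation. A hedged sketch of client-side usage, assuming that BigdataGraphFactory.connect(...) hands back a BigdataGraph as in the gremlin instructions above, and using a placeholder GraphML path:

import com.bigdata.blueprints.BigdataGraph;
import com.bigdata.blueprints.BigdataGraphFactory;

/** Hedged sketch: bulk-load a GraphML file through the blueprints layer. */
public class GraphMLLoadExample {

    public static void main(final String[] args) throws Exception {

        // Endpoint taken from the gremlin instructions above; adjust to your server.
        // Assumption: connect(...) returns a BigdataGraph (the client-side implementation).
        final BigdataGraph graph = BigdataGraphFactory.connect("http://localhost:9999");
        try {
            // "/tmp/graph-example-1.xml" is a placeholder path, not a file shipped with bigdata.
            graph.loadGraphML("/tmp/graph-example-1.xml");
        } finally {
            // Don't forget to shut down the connection when you're done.
            graph.shutdown();
        }
    }
}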
From: <jer...@us...> - 2014-05-08 01:49:37
Revision: 8226 http://sourceforge.net/p/bigdata/code/8226 Author: jeremy_carroll Date: 2014-05-08 01:49:33 +0000 (Thu, 08 May 2014) Log Message: ----------- Tests for the AnalyzerFactory's. The tests are for their shared behavior. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestConfigurableAsDefaultAnalyzerFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestDefaultAnalyzerFactory.java Added: branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs 2014-05-08 01:49:33 UTC (rev 8226) @@ -0,0 +1,2 @@ +eclipse.preferences.version=1 +encoding//bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java=UTF-8 Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-08 01:49:13 UTC (rev 8225) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -547,7 +547,7 @@ // RussianAnalyzer is missing any way to access stop words. if (RussianAnalyzer.class.equals(cls) && useDefaultStopWords()) { - return new AnalyzerPair(languageRange, new RussianAnalyzer(Version.LUCENE_CURRENT, Collections.EMPTY_SET), new RussianAnalyzer(Version.LUCENE_CURRENT)); + return new AnalyzerPair(languageRange, new RussianAnalyzer(Version.LUCENE_CURRENT), new RussianAnalyzer(Version.LUCENE_CURRENT, Collections.EMPTY_SET)); } return new VersionSetAnalyzerPair(this, cls); } @@ -612,7 +612,8 @@ */ private static final int MAX_LANG_CACHE_SIZE = 500; - private final String defaultLanguage; + private String defaultLanguage; + private final FullTextIndex<?> fullTextIndex; public ConfigurableAnalyzerFactory(final FullTextIndex<?> fullTextIndex) { @@ -621,9 +622,9 @@ if (fullTextIndex == null) throw new IllegalArgumentException(); - defaultLanguage = getDefaultLanguage(fullTextIndex); + this.fullTextIndex = fullTextIndex; - final Properties properties = initProperties(fullTextIndex); + final Properties properties = initProperties(); final Map<String, ConfigOptionsToAnalyzer> analyzers = new HashMap<String, ConfigOptionsToAnalyzer>(); @@ -686,6 +687,12 @@ } } + private String getDefaultLanguage() { + if (defaultLanguage == null) { + defaultLanguage = getDefaultLanguage(fullTextIndex); + } + return defaultLanguage; + } private static boolean hasConstructor(Class<? extends Analyzer> cls, Class<?> ... 
parameterTypes) { return getConstructor(cls, parameterTypes) != null; @@ -731,7 +738,7 @@ } - protected Properties initProperties(final FullTextIndex<?> fullTextIndex) { + protected Properties initProperties() { final Properties parentProperties = fullTextIndex.getProperties(); Properties myProps; if (Boolean.getBoolean(parentProperties.getProperty(Options.INCLUDE_DEFAULTS, Options.DEFAULT_INCLUDE_DEFAULTS))) { @@ -773,7 +780,8 @@ public Analyzer getAnalyzer(String languageCode, boolean filterStopwords) { if (languageCode == null || languageCode.equals("")) { - languageCode = defaultLanguage; + + languageCode = getDefaultLanguage(); } AnalyzerPair pair = langTag2AnalyzerPair.get(languageCode); Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractAnalyzerFactoryTest.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -0,0 +1,174 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 7, 2014 + */ +package com.bigdata.search; + +import java.io.IOException; +import java.io.StringReader; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.TermAttribute; + +public abstract class AbstractAnalyzerFactoryTest extends AbstractSearchTest { + + public AbstractAnalyzerFactoryTest() { + } + + public AbstractAnalyzerFactoryTest(String arg0) { + super(arg0); + } + + public void setUp() throws Exception { + super.setUp(); + init(getExtraProperties()); + } + abstract String[] getExtraProperties(); + + private Analyzer getAnalyzer(String lang, boolean filterStopWords) { + return getNdx().getAnalyzer(lang, filterStopWords); + } + + private void comparisonTest(String lang, + boolean stopWordsSignificant, + String text, + String spaceSeparated) throws IOException { + compareTokenStream(getAnalyzer(lang, stopWordsSignificant), text, + spaceSeparated.split(" ")); + } + private void compareTokenStream(Analyzer a, String text, String expected[]) throws IOException { + TokenStream s = a.tokenStream(null, new StringReader(text)); + int ix = 0; + while (s.incrementToken()) { + final TermAttribute term = s.getAttribute(TermAttribute.class); + final String word = term.term(); + assertTrue(ix < expected.length); + assertEquals(word, expected[ix++]); + } + assertEquals(ix, expected.length); + } + + + public void testEnglishFilterStopWords() throws IOException { + for (String lang: new String[]{ "eng", null, "" }) { + comparisonTest(lang, + true, + "The test to end all tests! 
Forever.", + "test end all tests forever" + ); + } + } + public void testEnglishNoFilter() throws IOException { + for (String lang: new String[]{ "eng", null, "" }) { + comparisonTest(lang, + false, + "The test to end all tests! Forever.", + "the test to end all tests forever" + ); + } + } + + // Note we careful use a three letter language code for german. + // 'de' is more standard, but the DefaultAnalyzerFactory does not + // implement 'de' correctly. + public void testGermanFilterStopWords() throws IOException { + comparisonTest("ger", + true, + "Hanoi - Im Streit um die Vorherrschaft im Südchinesischen Meer ist es zu einer " + + "erneuten Auseinandersetzung gekommen:", + "hanoi strei um vorherrschaf sudchinesisch meer zu erneu auseinandersetzung gekomm" + ); + + } + + // Note we careful use a three letter language code for Russian. + // 'ru' is more standard, but the DefaultAnalyzerFactory does not + // implement 'ru' correctly. + public void testRussianFilterStopWords() throws IOException { + comparisonTest("rus", + true, + // I hope this is not offensive text. + "Они ответственны полностью и за ту, и за другую трагедию. " + + "Мы уже получили данные от сочувствующих нам офицеров СБУ.", + "ответствен полност ту друг трагед получ дан сочувств нам офицер сбу" + ); + + } + public void testGermanNoStopWords() throws IOException { + comparisonTest("ger", + false, + "Hanoi - Im Streit um die Vorherrschaft im Südchinesischen Meer ist es zu einer " + + "erneuten Auseinandersetzung gekommen:", + "hanoi im strei um die vorherrschaf im sudchinesisch meer ist es zu ein erneu auseinandersetzung gekomm" + ); + + } + public void testRussianNoStopWords() throws IOException { + comparisonTest("rus", + false, + // I hope this is not offensive text. + "Они ответственны полностью и за ту, и за другую трагедию. " + + "Мы уже получили данные от сочувствующих нам офицеров СБУ.", + "он ответствен полност и за ту и за друг трагед мы уж получ дан от сочувств нам офицер сбу" + ); + + } + public void testJapanese() throws IOException { + for (boolean filterStopWords: new Boolean[]{true, false}) { + comparisonTest("jpn", + filterStopWords, + // I hope this is not offensive text. + "高林純示 生態学研究センター教授らの研究グループと松井健二 山口大学医学系研究科(農学系)教授らの研究グループは、", + "高林 林純 純示 生態 態学 学研 研究 究セ セン ンタ ター ー教 教授 授ら らの の研 研究 究グ グル ルー " + + "ープ プと と松 松井 井健 健二 山口 口大 大学 学医 医学 学系 系研 " + + "研究 究科 農学 学系 教授 授ら らの の研 研究 究グ グル ルー ープ プは"); + } + } + public void testConfiguredLanguages() { + checkConfig("BrazilianAnalyzer", "por", "pt"); + checkConfig("ChineseAnalyzer", "zho", "chi", "zh"); + checkConfig("CJKAnalyzer", "jpn", "ja", "kor", "ko"); + checkConfig("CzechAnalyzer", "ces", "cze", "cs"); + checkConfig("DutchAnalyzer", "dut", "nld", "nl"); + checkConfig("GermanAnalyzer", "deu", "ger", "de"); + checkConfig("GreekAnalyzer", "gre", "ell", "el"); + checkConfig("RussianAnalyzer", "rus", "ru"); + checkConfig("ThaiAnalyzer", "th", "tha"); + checkConfig("StandardAnalyzer", "en", "eng", "", null); + } + + private void checkConfig(String classname, String ...langs) { + for (String lang:langs) { + // The DefaultAnalyzerFactory only works for language tags of length exactly three. 
+// if (lang != null && lang.length()==3) + { + assertEquals(classname, getAnalyzer(lang,true).getClass().getSimpleName()); + assertEquals(classname, getAnalyzer(lang+"-x-foobar",true).getClass().getSimpleName()); + } + } + + } +} Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java 2014-05-08 01:49:13 UTC (rev 8225) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -1,3 +1,29 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 7, 2014 + */ package com.bigdata.search; import java.util.Properties; @@ -2,3 +28,2 @@ -import com.bigdata.btree.IndexMetadata; import com.bigdata.journal.IIndexManager; @@ -11,7 +36,6 @@ private String namespace; private IIndexManager indexManager; private FullTextIndex<Long> ndx; - private IndexMetadata indexMetadata; private Properties properties; public AbstractSearchTest() { @@ -22,19 +46,29 @@ } void init(String ...propertyValuePairs) { - namespace = getName(); - properties = getProperties(); + namespace = getClass().getName()+"#"+getName(); + indexManager = getStore(); + properties = (Properties) getProperties().clone(); + ndx = createFullTextIndex(namespace, properties, propertyValuePairs); + } + + private FullTextIndex<Long> createFullTextIndex(String namespace, Properties properties, String ...propertyValuePairs) { for (int i=0; i<propertyValuePairs.length; ) { properties.setProperty(propertyValuePairs[i++], propertyValuePairs[i++]); } - indexManager = getStore(); - ndx = new FullTextIndex<Long>(indexManager, namespace, ITx.UNISOLATED, properties); + FullTextIndex<Long> ndx = new FullTextIndex<Long>(indexManager, namespace, ITx.UNISOLATED, properties); ndx.create(); - indexMetadata = ndx.getIndex().getIndexMetadata(); - } + return ndx; + } + + FullTextIndex<Long> createFullTextIndex(String namespace, String ...propertyValuePairs) { + return createFullTextIndex(namespace, getProperties(), propertyValuePairs); + } public void tearDown() throws Exception { - indexManager.destroy(); + if (indexManager != null) { + indexManager.destroy(); + } super.tearDown(); } @@ -54,15 +88,8 @@ return ndx; } - IndexMetadata getIndexMetadata() { - return indexMetadata; - } - - Properties getSearchProperties() { return properties; } - - } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java 2014-05-08 01:49:13 UTC (rev 8225) +++ 
branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestAll.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -104,6 +104,14 @@ // test verifies search index is restart safe. suite.addTestSuite(TestSearchRestartSafe.class); + + // Check behavior of DefaultAnalyzerFactory, see also trac 915 + suite.addTestSuite(TestDefaultAnalyzerFactory.class); + + // Check default behavior of ConfigurableAnalyzerFactory + // which is intended to be the same as the intended + // behavior of DefaultAnalyzerFactory + suite.addTestSuite(TestConfigurableAsDefaultAnalyzerFactory.class); return suite; } Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestConfigurableAsDefaultAnalyzerFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestConfigurableAsDefaultAnalyzerFactory.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestConfigurableAsDefaultAnalyzerFactory.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -0,0 +1,43 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 7, 2014 + */ +package com.bigdata.search; + +public class TestConfigurableAsDefaultAnalyzerFactory extends AbstractAnalyzerFactoryTest { + + public TestConfigurableAsDefaultAnalyzerFactory() { + } + + public TestConfigurableAsDefaultAnalyzerFactory(String arg0) { + super(arg0); + } + + @Override + String[] getExtraProperties() { + return new String[]{FullTextIndex.Options.ANALYZER_FACTORY_CLASS, ConfigurableAnalyzerFactory.class.getName()}; + } + +} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestDefaultAnalyzerFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestDefaultAnalyzerFactory.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestDefaultAnalyzerFactory.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -0,0 +1,43 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 7, 2014 + */ +package com.bigdata.search; + +public class TestDefaultAnalyzerFactory extends AbstractAnalyzerFactoryTest { + + public TestDefaultAnalyzerFactory() { + } + + public TestDefaultAnalyzerFactory(String arg0) { + super(arg0); + } + + @Override + String[] getExtraProperties() { + return new String[0]; + } + +} Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java 2014-05-08 01:49:13 UTC (rev 8225) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java 2014-05-08 01:49:33 UTC (rev 8226) @@ -93,6 +93,10 @@ return keyBuilder; } + + IndexMetadata getIndexMetadata() { + return getNdx().getIndex().getIndexMetadata(); + } private IKeyBuilder keyBuilder; /** This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
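The compareTokenStream helper above walks a Lucene TokenStream and checks each emitted term against an expected list. The following standalone sketch shows that loop against the Lucene 3.x API used here (TermAttribute; later Lucene versions replaced it with CharTermAttribute), using a plain StandardAnalyzer rather than the analyzer factory under test:

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.util.Version;

/** Sketch of the token-stream walk used by compareTokenStream(), against the Lucene 3.x API. */
public class TokenizeExample {

    /** Collect the terms an analyzer produces for some text. */
    static List<String> tokens(final Analyzer a, final String text) throws IOException {
        final List<String> words = new ArrayList<String>();
        final TokenStream s = a.tokenStream(null, new StringReader(text));
        while (s.incrementToken()) {
            // TermAttribute is the Lucene 3.x attribute; newer Lucene uses CharTermAttribute.
            words.add(s.getAttribute(TermAttribute.class).term());
        }
        return words;
    }

    public static void main(final String[] args) throws IOException {
        final Analyzer a = new StandardAnalyzer(Version.LUCENE_CURRENT);
        System.out.println(tokens(a, "The test to end all tests! Forever."));
        // -> [test, end, all, tests, forever]
    }
}

With the default English stop set, "The" and "to" are dropped but "all" is kept, which is why the expected output in testEnglishFilterStopWords above reads "test end all tests forever".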
From: <jer...@us...> - 2014-05-08 01:49:16
Revision: 8225 http://sourceforge.net/p/bigdata/code/8225 Author: jeremy_carroll Date: 2014-05-08 01:49:13 +0000 (Thu, 08 May 2014) Log Message: ----------- improved encapsulation Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java 2014-05-08 01:49:00 UTC (rev 8224) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java 2014-05-08 01:49:13 UTC (rev 8225) @@ -8,11 +8,11 @@ import com.bigdata.journal.ProxyTestCase; public abstract class AbstractSearchTest extends ProxyTestCase<IIndexManager> { - String NAMESPACE; - IIndexManager indexManager; - FullTextIndex<Long> ndx; - IndexMetadata indexMetadata; - Properties properties; + private String namespace; + private IIndexManager indexManager; + private FullTextIndex<Long> ndx; + private IndexMetadata indexMetadata; + private Properties properties; public AbstractSearchTest() { } @@ -22,13 +22,13 @@ } void init(String ...propertyValuePairs) { - NAMESPACE = getName(); + namespace = getName(); properties = getProperties(); for (int i=0; i<propertyValuePairs.length; ) { properties.setProperty(propertyValuePairs[i++], propertyValuePairs[i++]); } indexManager = getStore(); - ndx = new FullTextIndex<Long>(indexManager, NAMESPACE, ITx.UNISOLATED, properties); + ndx = new FullTextIndex<Long>(indexManager, namespace, ITx.UNISOLATED, properties); ndx.create(); indexMetadata = ndx.getIndex().getIndexMetadata(); } @@ -38,5 +38,31 @@ super.tearDown(); } + String getNamespace() { + return namespace; + } + IIndexManager getIndexManager() { + return indexManager; + } + + void setIndexManager(IIndexManager indexManager) { + this.indexManager = indexManager; + } + + FullTextIndex<Long> getNdx() { + return ndx; + } + + IndexMetadata getIndexMetadata() { + return indexMetadata; + } + + + Properties getSearchProperties() { + return properties; + } + + + } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java 2014-05-08 01:49:00 UTC (rev 8224) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java 2014-05-08 01:49:13 UTC (rev 8225) @@ -117,7 +117,7 @@ // Use English. 
KeyBuilder.Options.USER_LANGUAGE, "en"); - final FullTextIndexTupleSerializer<Long> tupleSer = (FullTextIndexTupleSerializer<Long>) indexMetadata + final FullTextIndexTupleSerializer<Long> tupleSer = (FullTextIndexTupleSerializer<Long>) getIndexMetadata() .getTupleSerializer(); if(log.isInfoEnabled()) @@ -131,13 +131,13 @@ // ((DefaultKeyBuilderFactory) tupleSer.getKeyBuilderFactory()) // .getLocale().getLanguage()); - doKeyOrderTest(ndx, -1L/* docId */, 0/* fieldId */, true/* fieldsEnabled */); - doKeyOrderTest(ndx, 0L/* docId */, 0/* fieldId */, true/* fieldsEnabled */); - doKeyOrderTest(ndx, 1L/* docId */, 12/* fieldId */, true/* fieldsEnabled */); + doKeyOrderTest(getNdx(), -1L/* docId */, 0/* fieldId */, true/* fieldsEnabled */); + doKeyOrderTest(getNdx(), 0L/* docId */, 0/* fieldId */, true/* fieldsEnabled */); + doKeyOrderTest(getNdx(), 1L/* docId */, 12/* fieldId */, true/* fieldsEnabled */); - doKeyOrderTest(ndx, -1L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); - doKeyOrderTest(ndx, 0L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); - doKeyOrderTest(ndx, 1L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); + doKeyOrderTest(getNdx(), -1L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); + doKeyOrderTest(getNdx(), 0L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); + doKeyOrderTest(getNdx(), 1L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java 2014-05-08 01:49:00 UTC (rev 8224) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java 2014-05-08 01:49:13 UTC (rev 8225) @@ -86,14 +86,14 @@ { - final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, ndx); + final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, getNdx()); // index a document. ("The" is a stopword). - ndx.index(buffer, docId, fieldId, languageCode, + getNdx().index(buffer, docId, fieldId, languageCode, new StringReader("The quick brown dog")); // index a document. ("The" is a stopword). 
- ndx.index(buffer, docId + 1, fieldId, languageCode, + getNdx().index(buffer, docId + 1, fieldId, languageCode, new StringReader("The slow brown cow")); buffer.flush(); @@ -103,7 +103,7 @@ /* Search (exact match on one document, partial match on the other) */ { - final Hiterator<?> itr = ndx.search(new FullTextQuery("The quick brown dog", + final Hiterator<?> itr = getNdx().search(new FullTextQuery("The quick brown dog", languageCode, false/* prefixMatch */ , regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); @@ -111,7 +111,7 @@ if (log.isInfoEnabled()) log.info("hits:" + itr); - assertEquals(2, ndx.count(new FullTextQuery("The quick brown dog", + assertEquals(2, getNdx().count(new FullTextQuery("The quick brown dog", languageCode, false/* prefixMatch */))); assertTrue(itr.hasNext()); @@ -134,13 +134,13 @@ */ { - final Hiterator<?> itr = ndx.search(new FullTextQuery("The qui bro do", + final Hiterator<?> itr = getNdx().search(new FullTextQuery("The qui bro do", languageCode, true/*prefixMatch*/, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); if(log.isInfoEnabled()) log.info("hits:" + itr); - assertEquals(2, ndx.count(new FullTextQuery("The qui bro do", + assertEquals(2, getNdx().count(new FullTextQuery("The qui bro do", languageCode, true/*prefixMatch*/))); assertTrue(itr.hasNext()); @@ -163,14 +163,14 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("brown", languageCode, false/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); if(log.isInfoEnabled()) log.info("hits:" + itr); - assertEquals(2, ndx + assertEquals(2, getNdx() .count(new FullTextQuery("brown", languageCode, false/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit))); @@ -181,13 +181,13 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("brown", languageCode, true/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); if(log.isInfoEnabled()) log.info("hits:" + itr); - assertEquals(2, ndx + assertEquals(2, getNdx() .count(new FullTextQuery("brown", languageCode, true/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit))); @@ -198,13 +198,13 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("bro", languageCode, true/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); if(log.isInfoEnabled()) log.info("hits:" + itr); - assertEquals(2, ndx + assertEquals(2, getNdx() .count(new FullTextQuery("bro", languageCode, true/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit))); @@ -215,7 +215,7 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("bro", languageCode, false/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); @@ -231,7 +231,7 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("qui", languageCode, true/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); @@ 
-247,7 +247,7 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("qui", languageCode, false/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); @@ -263,7 +263,7 @@ */ { - final Hiterator<?> itr = ndx + final Hiterator<?> itr = getNdx() .search(new FullTextQuery("quick", languageCode, false/* prefixMatch */, regex, matchAllTerms, false/* matchExact*/, minCosine, maxCosine, minRank, maxRank, timeout, unit)); Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java 2014-05-08 01:49:00 UTC (rev 8224) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java 2014-05-08 01:49:13 UTC (rev 8225) @@ -124,10 +124,10 @@ */ long docId = 1; final int fieldId = 0; - final TokenBuffer<Long> buffer = new TokenBuffer<Long>(docs.length, ndx); + final TokenBuffer<Long> buffer = new TokenBuffer<Long>(docs.length, getNdx()); for (String s : docs) { - ndx.index(buffer, Long.valueOf(docId++), fieldId, + getNdx().index(buffer, Long.valueOf(docId++), fieldId, languageCode, new StringReader(s)); } @@ -141,7 +141,7 @@ final String query = "child proofing"; - final Hiterator<Hit<Long>> itr = ndx.search(new FullTextQuery( + final Hiterator<Hit<Long>> itr = getNdx().search(new FullTextQuery( query, languageCode, prefixMatch, regex, matchAllTerms, false/* matchExact*/, Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java 2014-05-08 01:49:00 UTC (rev 8224) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java 2014-05-08 01:49:13 UTC (rev 8225) @@ -112,12 +112,12 @@ final String languageCode = "EN"; { - final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, ndx); + final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, getNdx()); - ndx.index(buffer, docId, fieldId, languageCode, + getNdx().index(buffer, docId, fieldId, languageCode, new StringReader(text)); - ndx.index(buffer, docId + 1, fieldId, languageCode, + getNdx().index(buffer, docId + 1, fieldId, languageCode, new StringReader("The slow brown cow")); buffer.flush(); @@ -127,8 +127,8 @@ /* Search w/o restart. */ { - ndx = new FullTextIndex<Long>(indexManager, - NAMESPACE, ITx.UNISOLATED, properties); + final FullTextIndex<Long> ndx = new FullTextIndex<Long>(getIndexManager(), + getNamespace(), ITx.UNISOLATED, getSearchProperties()); final Hiterator<?> itr = // ndx.search( @@ -160,13 +160,13 @@ /* * Shutdown and restart. */ - indexManager = reopenStore(indexManager); + setIndexManager(reopenStore(getIndexManager())); /* Search with restart. */ { + final FullTextIndex<Long> ndx = new FullTextIndex<Long>(getIndexManager(), getNamespace(), + ITx.UNISOLATED, getSearchProperties()); - ndx = new FullTextIndex<Long>( - indexManager, NAMESPACE, ITx.UNISOLATED, properties); final Hiterator<?> itr = // ndx.search(text, languageCode); ndx.search(new FullTextQuery(text, This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
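For readers following the revision above: the change set systematically replaces direct references to the shared test fields (ndx, indexManager, NAMESPACE, properties) with accessor methods (getNdx(), getIndexManager(), getNamespace(), getSearchProperties()) on the common test superclass. The sketch below is illustrative only and is not part of the commit; it assumes the AbstractSearchTest base class with the init()/getNdx() helpers exactly as they appear in these diffs, and the test class and method names are hypothetical.

package com.bigdata.search;

import java.io.StringReader;

// Illustrative only (not part of this revision): a minimal search test
// written directly against the accessor style that this commit rolls forward.
public class ExampleAccessorStyleTest extends AbstractSearchTest {

    public void test_indexAndCount() throws Exception {

        // Sets up the index manager, the namespace and the FullTextIndex.
        init();

        final String languageCode = "EN";

        // Index one document through the shared accessor.
        final TokenBuffer<Long> buffer = new TokenBuffer<Long>(1, getNdx());

        getNdx().index(buffer, 1L/* docId */, 0/* fieldId */, languageCode,
                new StringReader("The quick brown dog"));

        buffer.flush();

        // "The" is a stopword; a non-prefix search on "quick" matches the one document.
        assertEquals(1, getNdx().count(new FullTextQuery("quick",
                languageCode, false/* prefixMatch */)));
    }
}

Routing everything through accessors is also what lets TestSearchRestartSafe swap the backing store on restart via setIndexManager(reopenStore(getIndexManager())), as shown in the diff above, without the test bodies holding stale field references.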
From: <jer...@us...> - 2014-05-08 01:49:03
Revision: 8224 http://sourceforge.net/p/bigdata/code/8224 Author: jeremy_carroll Date: 2014-05-08 01:49:00 +0000 (Thu, 08 May 2014) Log Message: ----------- Extracted common superclass Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/AbstractSearchTest.java 2014-05-08 01:49:00 UTC (rev 8224) @@ -0,0 +1,42 @@ +package com.bigdata.search; + +import java.util.Properties; + +import com.bigdata.btree.IndexMetadata; +import com.bigdata.journal.IIndexManager; +import com.bigdata.journal.ITx; +import com.bigdata.journal.ProxyTestCase; + +public abstract class AbstractSearchTest extends ProxyTestCase<IIndexManager> { + String NAMESPACE; + IIndexManager indexManager; + FullTextIndex<Long> ndx; + IndexMetadata indexMetadata; + Properties properties; + + public AbstractSearchTest() { + } + + public AbstractSearchTest(String arg0) { + super(arg0); + } + + void init(String ...propertyValuePairs) { + NAMESPACE = getName(); + properties = getProperties(); + for (int i=0; i<propertyValuePairs.length; ) { + properties.setProperty(propertyValuePairs[i++], propertyValuePairs[i++]); + } + indexManager = getStore(); + ndx = new FullTextIndex<Long>(indexManager, NAMESPACE, ITx.UNISOLATED, properties); + ndx.create(); + indexMetadata = ndx.getIndex().getIndexMetadata(); + } + + public void tearDown() throws Exception { + indexManager.destroy(); + super.tearDown(); + } + + +} Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java 2014-05-07 20:24:56 UTC (rev 8223) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestKeyBuilder.java 2014-05-08 01:49:00 UTC (rev 8224) @@ -48,7 +48,7 @@ * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ -public class TestKeyBuilder extends ProxyTestCase<IIndexManager> { +public class TestKeyBuilder extends AbstractSearchTest { /** * @@ -103,9 +103,6 @@ */ public void test_keyOrder() { - final String namespace = getName(); - - final Properties properties = getProperties(); // The default Strength should be Primary. assertEquals( @@ -113,25 +110,13 @@ StrengthEnum .valueOf(FullTextIndex.Options.DEFAULT_INDEXER_COLLATOR_STRENGTH)); + init( // Use the default Strength. - properties.setProperty(KeyBuilder.Options.STRENGTH, - FullTextIndex.Options.DEFAULT_INDEXER_COLLATOR_STRENGTH); - + KeyBuilder.Options.STRENGTH, + FullTextIndex.Options.DEFAULT_INDEXER_COLLATOR_STRENGTH, // Use English. 
- properties.setProperty(KeyBuilder.Options.USER_LANGUAGE, "en"); - - final IIndexManager store = getStore(); - - try { + KeyBuilder.Options.USER_LANGUAGE, "en"); - final FullTextIndex<Long> ndx = new FullTextIndex<Long>(store, - namespace, ITx.UNISOLATED, properties); - - ndx.create(); - - final IndexMetadata indexMetadata = ndx.getIndex() - .getIndexMetadata(); - final FullTextIndexTupleSerializer<Long> tupleSer = (FullTextIndexTupleSerializer<Long>) indexMetadata .getTupleSerializer(); @@ -154,9 +139,6 @@ doKeyOrderTest(ndx, 0L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); doKeyOrderTest(ndx, 1L/* docId */, 0/* fieldId */, false/* fieldsEnabled */); - } finally { - store.destroy(); - } } protected void doKeyOrderTest(final FullTextIndex<Long> ndx, Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java 2014-05-07 20:24:56 UTC (rev 8223) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestPrefixSearch.java 2014-05-08 01:49:00 UTC (rev 8224) @@ -46,7 +46,7 @@ * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ -public class TestPrefixSearch extends ProxyTestCase<IIndexManager> { +public class TestPrefixSearch extends AbstractSearchTest { /** * @@ -74,17 +74,9 @@ final TimeUnit unit = TimeUnit.MILLISECONDS; final String regex = null; - final Properties properties = getProperties(); - - final IIndexManager indexManager = getStore(properties); + init(); - try { - final String NAMESPACE = "test"; - - final FullTextIndex<Long> ndx = new FullTextIndex<Long>(indexManager, - NAMESPACE, ITx.UNISOLATED, properties); - /* * Index document(s). */ @@ -93,7 +85,6 @@ final String languageCode = "EN"; { - ndx.create(); final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, ndx); @@ -283,12 +274,7 @@ } - } finally { - indexManager.destroy(); - - } - } } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java 2014-05-07 20:24:56 UTC (rev 8223) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearch.java 2014-05-08 01:49:00 UTC (rev 8224) @@ -51,7 +51,7 @@ * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ -public class TestSearch extends ProxyTestCase<IIndexManager> { +public class TestSearch extends AbstractSearchTest { public TestSearch() { super(); @@ -105,7 +105,6 @@ /** all documents are in English. */ final String languageCode = "EN"; - final String NAMESPACE = "test"; final boolean prefixMatch = false; final double minCosine = .0; @@ -117,64 +116,46 @@ final TimeUnit unit = TimeUnit.MILLISECONDS; final String regex = null; - final Properties properties = getProperties(); - - final IIndexManager indexManager = getStore( properties ); - - try { + init(); + { - // setup and populate the index. - FullTextIndex<Long> ndx; - { - - ndx = new FullTextIndex<Long>(indexManager, NAMESPACE, - ITx.UNISOLATED, properties ); + /* + * Index the documents. 
+ */ + long docId = 1; + final int fieldId = 0; + final TokenBuffer<Long> buffer = new TokenBuffer<Long>(docs.length, ndx); + for (String s : docs) { - ndx.create(); + ndx.index(buffer, Long.valueOf(docId++), fieldId, + languageCode, new StringReader(s)); - /* - * Index the documents. - */ - long docId = 1; - final int fieldId = 0; - final TokenBuffer<Long> buffer = new TokenBuffer<Long>(docs.length, ndx); - for (String s : docs) { + } - ndx.index(buffer, Long.valueOf(docId++), fieldId, - languageCode, new StringReader(s)); + // flush index writes to the database. + buffer.flush(); + } - } + // run query and verify results. + { - // flush index writes to the database. - buffer.flush(); - } + final String query = "child proofing"; - // run query and verify results. - { - - final String query = "child proofing"; - - final Hiterator<Hit<Long>> itr = ndx.search(new FullTextQuery( - query, - languageCode, prefixMatch, regex, - matchAllTerms, false/* matchExact*/, - minCosine, maxCosine, - minRank, maxRank, timeout, unit)); + final Hiterator<Hit<Long>> itr = ndx.search(new FullTextQuery( + query, + languageCode, prefixMatch, regex, + matchAllTerms, false/* matchExact*/, + minCosine, maxCosine, + minRank, maxRank, timeout, unit)); // query, languageCode, 0d/* minCosine */, // Integer.MAX_VALUE/* maxRank */); - - assertSameHits(new IHit[] { // - new HT<Long>(5L, 0.44194173824159216d),// - new HT<Long>(6L, 0.44194173824159216d),// - new HT<Long>(2L, 0.35355339059327373d),// - new HT<Long>(3L, 0.35355339059327373d),// - }, itr); - } - - } finally { - - indexManager.destroy(); - + + assertSameHits(new IHit[] { // + new HT<Long>(5L, 0.44194173824159216d),// + new HT<Long>(6L, 0.44194173824159216d),// + new HT<Long>(2L, 0.35355339059327373d),// + new HT<Long>(3L, 0.35355339059327373d),// + }, itr); } } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java 2014-05-07 20:24:56 UTC (rev 8223) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/test/com/bigdata/search/TestSearchRestartSafe.java 2014-05-08 01:49:00 UTC (rev 8224) @@ -43,7 +43,7 @@ * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ -public class TestSearchRestartSafe extends ProxyTestCase<IIndexManager> { +public class TestSearchRestartSafe extends AbstractSearchTest { /** * @@ -100,112 +100,96 @@ final TimeUnit unit = TimeUnit.MILLISECONDS; final String regex = null; - final Properties properties = getProperties(); - - IIndexManager indexManager = getStore(properties); + init(); - try { - final String NAMESPACE = "test"; + /* + * Index a document. + */ + final long docId = 12L; + final int fieldId = 3; + final String text = "The quick brown dog"; + final String languageCode = "EN"; + { - /* - * Index a document. 
- */ - final long docId = 12L; - final int fieldId = 3; - final String text = "The quick brown dog"; - final String languageCode = "EN"; - { + final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, ndx); - final FullTextIndex<Long> ndx = new FullTextIndex<Long>(indexManager, - NAMESPACE, ITx.UNISOLATED, properties); + ndx.index(buffer, docId, fieldId, languageCode, + new StringReader(text)); - ndx.create(); + ndx.index(buffer, docId + 1, fieldId, languageCode, + new StringReader("The slow brown cow")); - final TokenBuffer<Long> buffer = new TokenBuffer<Long>(2, ndx); + buffer.flush(); - ndx.index(buffer, docId, fieldId, languageCode, - new StringReader(text)); + } - ndx.index(buffer, docId + 1, fieldId, languageCode, - new StringReader("The slow brown cow")); + /* Search w/o restart. */ + { - buffer.flush(); + ndx = new FullTextIndex<Long>(indexManager, + NAMESPACE, ITx.UNISOLATED, properties); - } - - /* Search w/o restart. */ - { - - final FullTextIndex<Long> ndx = new FullTextIndex<Long>(indexManager, - NAMESPACE, ITx.UNISOLATED, properties); - - final Hiterator<?> itr = + final Hiterator<?> itr = // ndx.search( // text, languageCode // ); - ndx.search(new FullTextQuery(text, - languageCode, prefixMatch, - regex, matchAllTerms, false/* matchExact*/, - minCosine, maxCosine, - minRank, maxRank, timeout, unit)); - - assertEquals(1, itr.size()); // Note: 2nd result pruned by cosine. + ndx.search(new FullTextQuery(text, + languageCode, prefixMatch, + regex, matchAllTerms, false/* matchExact*/, + minCosine, maxCosine, + minRank, maxRank, timeout, unit)); + + assertEquals(1, itr.size()); // Note: 2nd result pruned by cosine. - assertTrue(itr.hasNext()); + assertTrue(itr.hasNext()); - final IHit<?> hit1 = itr.next(); + final IHit<?> hit1 = itr.next(); - if(log.isInfoEnabled()) - log.info("hit1:" + hit1); + if(log.isInfoEnabled()) + log.info("hit1:" + hit1); // /* // * Note: with cosine computation only the first hit is visited. // */ - assertFalse(itr.hasNext()); + assertFalse(itr.hasNext()); - } + } - /* - * Shutdown and restart. - */ - indexManager = reopenStore(indexManager); + /* + * Shutdown and restart. + */ + indexManager = reopenStore(indexManager); - /* Search with restart. */ - { + /* Search with restart. */ + { - final FullTextIndex<Long> ndx = new FullTextIndex<Long>( - indexManager, NAMESPACE, ITx.UNISOLATED, properties); + ndx = new FullTextIndex<Long>( + indexManager, NAMESPACE, ITx.UNISOLATED, properties); - final Hiterator<?> itr = // ndx.search(text, languageCode); - ndx.search(new FullTextQuery(text, - languageCode, prefixMatch, - regex, matchAllTerms, false/* matchExact*/, - minCosine, maxCosine, - minRank, maxRank, timeout, unit)); + final Hiterator<?> itr = // ndx.search(text, languageCode); + ndx.search(new FullTextQuery(text, + languageCode, prefixMatch, + regex, matchAllTerms, false/* matchExact*/, + minCosine, maxCosine, + minRank, maxRank, timeout, unit)); - assertEquals(1, itr.size()); // Note: 2nd result pruned by cosine. + assertEquals(1, itr.size()); // Note: 2nd result pruned by cosine. - assertTrue(itr.hasNext()); + assertTrue(itr.hasNext()); - final IHit<?> hit1 = itr.next(); + final IHit<?> hit1 = itr.next(); - if(log.isInfoEnabled()) - log.info("hit1:" + hit1); + if(log.isInfoEnabled()) + log.info("hit1:" + hit1); // /* // * Note: with cosine computation only the first hit is visited. 
// */ - assertFalse(itr.hasNext()); + assertFalse(itr.hasNext()); - } - - } finally { - - indexManager.destroy(); - } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
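The superclass extracted in r8224 centralizes store setup (getStore(), FullTextIndex creation, create()) and teardown (indexManager.destroy()), so individual tests shrink to property configuration plus assertions. Below is a hedged sketch of how a subclass drives the new init(String...) helper; the test class, the test method, and the import path for KeyBuilder (assumed to be com.bigdata.btree.keys) are hypothetical, while the property names and the package-private fields (NAMESPACE, ndx, indexMetadata) come from the added AbstractSearchTest above. The later revision r8225, shown earlier in this archive, wraps those fields in accessors such as getNdx().

package com.bigdata.search;

import com.bigdata.btree.keys.KeyBuilder; // import path assumed

// Hypothetical subclass (not part of r8224) showing the intended usage of the
// extracted superclass: init(...) consumes property/value pairs two at a time,
// creates the FullTextIndex, and tearDown() destroys the backing store.
public class ExampleCollatorTest extends AbstractSearchTest {

    public void test_setup() {

        init(
            // Use the default collator strength.
            KeyBuilder.Options.STRENGTH,
            FullTextIndex.Options.DEFAULT_INDEXER_COLLATOR_STRENGTH,
            // Use English.
            KeyBuilder.Options.USER_LANGUAGE, "en");

        // The superclass keyed the namespace off the test name and created the index.
        assertEquals(getName(), NAMESPACE);
        assertNotNull(ndx.getIndex());
        assertNotNull(indexMetadata);
    }
}

The main payoff, visible throughout the diff, is that the repeated try { ... } finally { indexManager.destroy(); } scaffolding disappears from every test body.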
From: <mrp...@us...> - 2014-05-07 20:25:03
Revision: 8223 http://sourceforge.net/p/bigdata/code/8223 Author: mrpersonick Date: 2014-05-07 20:24:56 +0000 (Wed, 07 May 2014) Log Message: ----------- Commit of Blueprints/Gremlin support. See ticket 913. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/.classpath branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepository.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepositoryConnection.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/client/RemoteRepository.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlClient2.java branches/BIGDATA_RELEASE_1_3_0/build.properties branches/BIGDATA_RELEASE_1_3_0/build.xml branches/BIGDATA_RELEASE_1_3_0/pom.xml Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/LEGAL/jettison-license.txt branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/blueprints-test-2.4.0.jar branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/jettison-1.3.3.jar branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataRDFFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataVertex.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BlueprintsRDFFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/edge.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/edges.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/edgesByProperty.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/vertex.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphEmbedded.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/graph-example-1.xml branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java Removed Paths: ------------- 
branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataBlueprintsGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEventTransactionalGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataVertex.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/QueryManager.java Modified: branches/BIGDATA_RELEASE_1_3_0/.classpath =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.classpath 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/.classpath 2014-05-07 20:24:56 UTC (rev 8223) @@ -1,16 +1,19 @@ <?xml version="1.0" encoding="UTF-8"?> <classpath> + <classpathentry kind="src" path="bigdata/src/java"/> <classpathentry kind="src" path="bigdata-rdf/src/java"/> + <classpathentry kind="src" path="bigdata-sails/src/java"/> + <classpathentry kind="src" path="bigdata-blueprints/src/java"/> + <classpathentry kind="src" path="bigdata/src/test"/> + <classpathentry kind="src" path="bigdata-rdf/src/test"/> + <classpathentry kind="src" path="bigdata-sails/src/test"/> + <classpathentry kind="src" path="bigdata-blueprints/src/test"/> + <classpathentry kind="src" path="bigdata-war/src"/> + <classpathentry kind="src" path="bigdata/src/resources/logging"/> <classpathentry kind="src" path="bigdata-rdf/src/samples"/> <classpathentry kind="src" path="dsi-utils/src/java"/> - <classpathentry kind="src" path="bigdata/src/resources/logging"/> <classpathentry kind="src" path="bigdata-sails/src/samples"/> <classpathentry kind="src" path="bigdata-jini/src/test"/> - <classpathentry kind="src" path="bigdata-sails/src/java"/> - <classpathentry kind="src" path="bigdata/src/java"/> - <classpathentry kind="src" path="bigdata-rdf/src/test"/> - <classpathentry kind="src" path="bigdata/src/test"/> - <classpathentry kind="src" path="bigdata-sails/src/test"/> <classpathentry kind="src" path="bigdata-jini/src/java"/> <classpathentry kind="src" path="contrib/src/problems"/> <classpathentry kind="src" path="bigdata/src/samples"/> @@ -21,7 +24,6 @@ <classpathentry kind="src" path="junit-ext/src/java"/> <classpathentry kind="src" path="lgpl-utils/src/java"/> <classpathentry kind="src" path="lgpl-utils/src/test"/> - <classpathentry kind="src" path="bigdata-war/src"/> <classpathentry kind="src" path="bigdata-ganglia/src/java"/> <classpathentry kind="src" path="bigdata-ganglia/src/test"/> <classpathentry kind="src" path="bigdata-rdf/src/resources/service-providers"/> @@ -92,5 +94,8 @@ <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-webapp-9.1.4.v20140401.jar" sourcepath="/Users/bryan/Downloads/org.eclipse.jetty.project-jetty-9.1.4.v20140401"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-xml-9.1.4.v20140401.jar"/> <classpathentry exported="true" kind="lib" path="bigdata-sails/lib/jackson-core-2.2.3.jar"/> + <classpathentry kind="lib" path="bigdata-blueprints/lib/blueprints-core-2.4.0.jar"/> + <classpathentry kind="lib" path="bigdata-blueprints/lib/blueprints-test-2.4.0.jar"/> + <classpathentry kind="lib" path="bigdata-blueprints/lib/jettison-1.3.3.jar"/> <classpathentry kind="output" path="bin"/> </classpath> Added: 
branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/LEGAL/jettison-license.txt =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/LEGAL/jettison-license.txt (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/LEGAL/jettison-license.txt 2014-05-07 20:24:56 UTC (rev 8223) @@ -0,0 +1,13 @@ +Copyright 2006 Envoi Solutions LLC + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. \ No newline at end of file Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/LEGAL/jettison-license.txt ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/blueprints-test-2.4.0.jar =================================================================== (Binary files differ) Index: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/blueprints-test-2.4.0.jar =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/blueprints-test-2.4.0.jar 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/blueprints-test-2.4.0.jar 2014-05-07 20:24:56 UTC (rev 8223) Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/blueprints-test-2.4.0.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/jettison-1.3.3.jar =================================================================== (Binary files differ) Index: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/jettison-1.3.3.jar =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/jettison-1.3.3.jar 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/jettison-1.3.3.jar 2014-05-07 20:24:56 UTC (rev 8223) Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/lib/jettison-1.3.3.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataBlueprintsGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataBlueprintsGraph.java 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataBlueprintsGraph.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -1,141 +0,0 @@ -package com.bigdata.blueprints; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -import sun.reflect.generics.reflectiveObjects.NotImplementedException; - -import 
com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Features; -import com.tinkerpop.blueprints.GraphQuery; -import com.tinkerpop.blueprints.TransactionalGraph; -import com.tinkerpop.blueprints.Vertex; - - -public abstract class BigdataBlueprintsGraph implements BigdataEventTransactionalGraph { - // elements that we will be deleting from the store - private ArrayList<BigdataElement> removedElements = new ArrayList<BigdataElement>(); - // vertices that we will be adding to the store - private HashMap<String,BigdataVertex> addedVertices = new HashMap<String,BigdataVertex>(); - // elements that we will be adding to the store - private HashMap<String,BigdataEdge> addedEdges = new HashMap<String,BigdataEdge>(); - private QueryManager qm = null; - - public BigdataBlueprintsGraph () { } - - public BigdataBlueprintsGraph (QueryManager qm) { this.qm = qm; } - - public void setQueryManager(QueryManager qm) { this.qm = qm; } - public QueryManager getQueryManager() { return qm; } - - public void commit() { - // form and submit query - // - // - // - throwUnimplemented( "commit" ); - } - - public void rollback() { - throwUnimplemented( "rollback" ); - } - - public void stopTransaction(TransactionalGraph.Conclusion conclusion) { - throwUnimplemented( "stopTransaction" ); - } - - public void shutdown() { - throwUnimplemented( "shutdown" ); - } - - public Vertex getVertex(Object id) { - // we can only remove an item from the "add" queue - return addedVertices.get( (String) id ); - } - - public BigdataBlueprintsGraph getBasseGraph() { return this; } - - public Edge addEdge(Object id, BigdataVertex outVertex, BigdataVertex inVertex, String label) { - BigdataEdge edge = new BigdataEdge( (String)id, outVertex, inVertex, label ); - addedEdges.put((String)id, edge); - return edge; - } - - public Features getFeatures() { - throwUnimplemented( "getFeatures" ); - return (Features)null; - } - - public Vertex addVertex(Object id) { - BigdataVertex v = new BigdataVertex( (String)id ); - addedVertices.put( (String)id, v ); - return v; - } - - public void removeVertex(BigdataVertex vertex) { - addedVertices.remove( vertex.getId() ); // if present - removedElements.add( vertex ); - } - - public Iterable<Vertex> getVertices(String key, Object value) { - throwUnimplemented( "getVertices(String key, Object value)" ); - return (Iterable<Vertex>)null; - } - - public Iterable<Vertex> getVertices() { - // we only return what is in the "add" queue - final List<Vertex> vertexList = new ArrayList<Vertex>(); - vertexList.addAll( addedVertices.values() ); - return vertexList; - } - - public Edge getEdge(Object id) { - // we can only remove an item from the "add" queue - return addedEdges.get( (String) id ); - } - - public void removeEdge(BigdataEdge edge) { - addedEdges.remove( edge.getId() ); // if present - removedElements.add( edge ); - } - - public Iterable<Edge> getEdges(String key, Object value) { - throwUnimplemented( "getEdges(String key, Object value)" ); - return (Iterable<Edge>)null; - } - - public Iterable<Edge> getEdges() { - // we only return what is in the add queue - final List<Edge> edgeList = new ArrayList<Edge>(); - edgeList.addAll( addedEdges.values() ); - return edgeList; - } - - public GraphQuery query() { - throwUnimplemented( "queries" ); - return (GraphQuery)null; - } - - // @SuppressWarnings("deprecation") - private void throwUnimplemented(String method) { - // unchecked( new Exception( "The '" + method + "' has not been implemented." 
) ); - throw new NotImplementedException(); - } - - - /* Maybe use later - * - public static RuntimeException unchecked(Throwable e) { - BigdataBlueprintsGraph.<RuntimeException>throwAny(e); - return null; - } - - @SuppressWarnings("unchecked") - private static <E extends Throwable> void throwAny(Throwable e) throws E { - throw (E)e; - } - */ - -} - Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -1,52 +0,0 @@ -package com.bigdata.blueprints; - -import sun.reflect.generics.reflectiveObjects.NotImplementedException; - -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Vertex; - - -public class BigdataEdge extends BigdataElement implements Edge { - - protected BigdataVertex vOut = null; - protected BigdataVertex vIn = null; - - - public BigdataEdge(String id) { - super(id); - } - - public BigdataEdge(String id, String label) { - super(id,label); - } - - public BigdataEdge(String id, BigdataVertex out, BigdataVertex in, String label) { - super(id,label); - this.vOut = out; - this.vIn = in; - } - - public Vertex getVertex(Direction direction) throws IllegalArgumentException { - if( direction == Direction.IN ) { - return vIn; - } - else if( direction == Direction.OUT ) { - return vOut; - } - else { - throw new NotImplementedException(); - } - } - - @Override - public void remove() { - throw new NotImplementedException(); - } - - public String toString() { - // toTTLString(); - return "Not Implemented"; - } - -} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -0,0 +1,107 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.Arrays; +import java.util.List; + +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.vocabulary.RDFS; + +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Vertex; + +/** + * Edge implementation that wraps an Edge statement and points to a + * {@link BigdataGraph} instance. + * + * @author mikepersonick + * + */ +public class BigdataEdge extends BigdataElement implements Edge { + + private static final List<String> blacklist = Arrays.asList(new String[] { + "id", "", "label" + }); + + protected final Statement stmt; + + public BigdataEdge(final Statement stmt, final BigdataGraph graph) { + super(stmt.getPredicate(), graph); + + this.stmt = stmt; + } + + @Override + public Object getId() { + return graph.factory.fromEdgeURI(uri); + } + + @Override + public void remove() { + graph.removeEdge(this); + } + + @Override + public String getLabel() { + return (String) graph.getProperty(uri, RDFS.LABEL); + } + + @Override + public Vertex getVertex(final Direction dir) throws IllegalArgumentException { + + if (dir == Direction.BOTH) { + throw new IllegalArgumentException(); + } + + final URI uri = (URI) + (dir == Direction.OUT ? stmt.getSubject() : stmt.getObject()); + + final String id = graph.factory.fromVertexURI(uri); + + return graph.getVertex(id); + + } + + @Override + public void setProperty(final String property, final Object val) { + + if (property == null || blacklist.contains(property)) { + throw new IllegalArgumentException(); + } + + super.setProperty(property, val); + + } + + @Override + public String toString() { + final URI s = (URI) stmt.getSubject(); + final URI p = (URI) stmt.getPredicate(); + final URI o = (URI) stmt.getObject(); + return "e["+p.getLocalName()+"]["+s.getLocalName()+"->"+o.getLocalName()+"]"; + } + +} Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -1,70 +0,0 @@ -package com.bigdata.blueprints; - -import sun.reflect.generics.reflectiveObjects.NotImplementedException; - -import java.util.HashMap; -import java.util.Set; - -import org.openrdf.model.vocabulary.RDFS; -import com.tinkerpop.blueprints.Element; - -public class BigdataElement implements Element { - - protected String id = null; // must be a URI - - // implied here is that the properties exist in the graph store, we would need a 2nd property setter - private HashMap<String,String> properties = new HashMap<String,String>(); - // properties that we will be deleting from the store - private HashMap<String,String> removedProperties = new HashMap<String,String>(); - // properties 
that we will be adding to the store - private HashMap<String,String> addedProperties = new HashMap<String,String>(); - - public BigdataElement(String id) { - this.id = id; - } - - public BigdataElement(String id, String label) { - this.id = id; - setProperty( RDFS.LABEL.toString(), label ); - } - - @SuppressWarnings("unchecked") - public <T> T getProperty(String key) { - return (T) properties.get(key); - } - - public Set<String> getPropertyKeys() { - Set<String> keys = properties.keySet(); - keys.addAll( addedProperties.keySet() ); - return keys; - } - - public void setProperty(String key, Object value) { - addedProperties.put(key,(String)value ); - properties.put(key, (String)value); - } - - @SuppressWarnings("unchecked") - public <T> T removeProperty(String key) { - removedProperties.put(key, key); - return (T) properties.remove(key); - } - - public void remove() { - // delete from graph - throw new NotImplementedException(); - } - - public Object getId() { - return id; - } - - public boolean equals(Object obj) { - return obj.toString().equals(this.toString()); - } - - public String getLabel() { - return getProperty( RDFS.LABEL.toString() ); - } - -} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -0,0 +1,134 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.openrdf.model.Literal; +import org.openrdf.model.URI; + +import com.tinkerpop.blueprints.Element; + +/** + * Base class for {@link BigdataVertex} and {@link BigdataEdge}. Handles + * property-related methods. 
+ * + * @author mikepersonick + * + */ +public abstract class BigdataElement implements Element { + + private static final List<String> blacklist = Arrays.asList(new String[] { + "id", "" + }); + + protected final URI uri; + protected final BigdataGraph graph; + + public BigdataElement(final URI uri, final BigdataGraph graph) { + this.uri = uri; + this.graph = graph; + } + + @Override + @SuppressWarnings("unchecked") + public <T> T getProperty(final String property) { + + final URI p = graph.factory.toPropertyURI(property); + + return (T) graph.getProperty(uri, p); + + } + + @Override + public Set<String> getPropertyKeys() { + + return graph.getPropertyKeys(uri); + + } + + @Override + @SuppressWarnings("unchecked") + public <T> T removeProperty(final String property) { + + final URI p = graph.factory.toPropertyURI(property); + + return (T) graph.removeProperty(uri, p); + + } + + @Override + public void setProperty(final String property, final Object val) { + + if (property == null || blacklist.contains(property)) { + throw new IllegalArgumentException(); + } + + final URI p = graph.factory.toPropertyURI(property); + + final Literal o = graph.factory.toLiteral(val); + + graph.setProperty(uri, p, o); + + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((graph == null) ? 0 : graph.hashCode()); + result = prime * result + ((uri == null) ? 0 : uri.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + BigdataElement other = (BigdataElement) obj; + if (graph == null) { + if (other.graph != null) + return false; + } else if (!graph.equals(other.graph)) + return false; + if (uri == null) { + if (other.uri != null) + return false; + } else if (!uri.equals(other.uri)) + return false; + return true; + } + + @Override + public String toString() { + return uri.toString(); + } + + +} Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEventTransactionalGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEventTransactionalGraph.java 2014-05-07 15:57:53 UTC (rev 8222) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEventTransactionalGraph.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -1,8 +0,0 @@ -package com.bigdata.blueprints; - -import com.tinkerpop.blueprints.Graph; -import com.tinkerpop.blueprints.ThreadedTransactionalGraph; - -public interface BigdataEventTransactionalGraph extends Graph, ThreadedTransactionalGraph { - -} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -0,0 +1,843 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. 
All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import info.aduna.iteration.CloseableIteration; + +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import org.apache.commons.io.IOUtils; +import org.openrdf.OpenRDFException; +import org.openrdf.model.Literal; +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.impl.StatementImpl; +import org.openrdf.model.impl.URIImpl; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.GraphQueryResult; +import org.openrdf.query.QueryLanguage; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.RepositoryResult; + +import com.bigdata.rdf.store.BD; +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Features; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.GraphQuery; +import com.tinkerpop.blueprints.Vertex; +import com.tinkerpop.blueprints.util.DefaultGraphQuery; + +/** + * A base class for a Blueprints wrapper around a bigdata back-end. 
+ * + * @author mikepersonick + * + */ +public abstract class BigdataGraph implements Graph { + + public static final URI VERTEX = new URIImpl(BD.NAMESPACE + "Vertex"); + + public static final URI EDGE = new URIImpl(BD.NAMESPACE + "Edge"); + +// final BigdataSailRepository repo; +// +// transient BigdataSailRepositoryConnection cxn; + + final BlueprintsRDFFactory factory; + +// public BigdataGraph(final BigdataSailRepository repo) { +// this(repo, BigdataRDFFactory.INSTANCE); +// } + + public BigdataGraph(//final BigdataSailRepository repo, + final BlueprintsRDFFactory factory) { +// try { +// this.repo = repo; +// this.cxn = repo.getUnisolatedConnection(); +// this.cxn.setAutoCommit(false); + this.factory = factory; +// } catch (RepositoryException ex) { +// throw new RuntimeException(ex); +// } + } + + public String toString() { + return getClass().getSimpleName().toLowerCase(); + } + + protected abstract RepositoryConnection cxn() throws Exception; + +// public BigdataSailRepositoryConnection getConnection() { +// return this.cxn; +// } +// +// public BlueprintsRDFFactory getFactory() { +// return this.factory; +// } + +// public Value getValue(final URI s, final URI p) { +// +// try { +// +// final RepositoryResult<Statement> result = +// cxn.getStatements(s, p, null, false); +// +// if (result.hasNext()) { +// +// final Value o = result.next().getObject(); +// +// if (result.hasNext()) { +// throw new RuntimeException(s +// + ": more than one value for p: " + p +// + ", did you mean to call getValues()?"); +// } +// +// return o; +// +// } +// +// return null; +// +// } catch (Exception ex) { +// throw new RuntimeException(ex); +// } +// +// } + + public Object getProperty(final URI s, final URI p) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(s, p, null, false); + + if (result.hasNext()) { + + final Value value = result.next().getObject(); + + if (result.hasNext()) { + throw new RuntimeException(s + + ": more than one value for p: " + p + + ", did you mean to call getValues()?"); + } + + if (!(value instanceof Literal)) { + throw new RuntimeException("not a property: " + value); + } + + final Literal lit = (Literal) value; + + return factory.fromLiteral(lit); + + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + +// public List<Value> getValues(final URI s, final URI p) { +// +// try { +// +// final RepositoryResult<Statement> result = +// cxn().getStatements(s, p, null, false); +// +// final List<Value> values = new LinkedList<Value>(); +// +// while (result.hasNext()) { +// +// final Value o = result.next().getObject(); +// +// values.add(o); +// +// } +// +// return values; +// +// } catch (Exception ex) { +// throw new RuntimeException(ex); +// } +// +// } + + public List<Object> getProperties(final URI s, final URI p) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(s, p, null, false); + + final List<Object> props = new LinkedList<Object>(); + + while (result.hasNext()) { + + final Value value = result.next().getObject(); + + if (!(value instanceof Literal)) { + throw new RuntimeException("not a property: " + value); + } + + final Literal lit = (Literal) value; + + props.add(factory.fromLiteral(lit)); + + } + + return props; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + public Set<String> getPropertyKeys(final URI s) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(s, null, null, false); + + final 
Set<String> properties = new LinkedHashSet<String>(); + + while (result.hasNext()) { + + final Statement stmt = result.next(); + + if (!(stmt.getObject() instanceof Literal)) { + continue; + } + + if (stmt.getPredicate().equals(RDFS.LABEL)) { + continue; + } + + final String p = + factory.fromPropertyURI(stmt.getPredicate()); + + properties.add(p); + + } + + return properties; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + public Object removeProperty(final URI s, final URI p) { + + try { + + final Object oldVal = getProperty(s, p); + + cxn().remove(s, p, null); + + return oldVal; + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + public void setProperty(final URI s, final URI p, final Literal o) { + + try { + + cxn().remove(s, p, null); + + cxn().add(s, p, o); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + @Override + public Edge addEdge(final Object key, final Vertex from, final Vertex to, + final String label) { + + if (label == null) { + throw new IllegalArgumentException(); + } + + final String eid = key != null ? key.toString() : UUID.randomUUID().toString(); + + final URI edgeURI = factory.toEdgeURI(eid); + + if (key != null) { + + final Edge edge = getEdge(key); + + if (edge != null) { + if (!(edge.getVertex(Direction.OUT).equals(from) && + (edge.getVertex(Direction.OUT).equals(to)))) { + throw new IllegalArgumentException("edge already exists: " + key); + } + } + + } + + try { + +// if (cxn().hasStatement(edgeURI, RDF.TYPE, EDGE, false)) { +// throw new IllegalArgumentException("edge " + eid + " already exists"); +// } + + final URI fromURI = factory.toVertexURI(from.getId().toString()); + final URI toURI = factory.toVertexURI(to.getId().toString()); + + cxn().add(fromURI, edgeURI, toURI); + cxn().add(edgeURI, RDF.TYPE, EDGE); + cxn().add(edgeURI, RDFS.LABEL, factory.toLiteral(label)); + + return new BigdataEdge(new StatementImpl(fromURI, edgeURI, toURI), this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public Vertex addVertex(final Object key) { + + try { + + final String vid = key != null ? + key.toString() : UUID.randomUUID().toString(); + + final URI uri = factory.toVertexURI(vid); + +// if (cxn().hasStatement(vertexURI, RDF.TYPE, VERTEX, false)) { +// throw new IllegalArgumentException("vertex " + vid + " already exists"); +// } + + cxn().add(uri, RDF.TYPE, VERTEX); + + return new BigdataVertex(uri, this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public Edge getEdge(final Object key) { + + if (key == null) + throw new IllegalArgumentException(); + + try { + + final URI edge = factory.toEdgeURI(key.toString()); + + final RepositoryResult<Statement> result = + cxn().getStatements(null, edge, null, false); + + if (result.hasNext()) { + + final Statement stmt = result.next(); + + if (result.hasNext()) { + throw new RuntimeException( + "duplicate edge: " + key); + } + + return new BigdataEdge(stmt, this); + + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public Iterable<Edge> getEdges() { + + final URI wild = null; + return getEdges(wild, wild); + + } + + public Iterable<Edge> getEdges(final URI s, final URI o, final String... 
labels) { + + try { + +// final RepositoryResult<Statement> result = +// cxn().getStatements(s, p, o, false); +// +// return new EdgeIterable(result); + + final StringBuilder sb = new StringBuilder(); + sb.append("construct { ?from ?edge ?to . } where {\n"); + sb.append("?edge rdf:type bd:Edge . ?from ?edge ?to .\n"); + if (labels != null && labels.length > 0) { + if (labels.length == 1) { + sb.append("?edge rdfs:label \"").append(labels[0]).append("\" .\n"); + } else { + sb.append("?edge rdfs:label ?label .\n"); + sb.append("filter(?label in ("); + for (String label : labels) { + sb.append("\""+label+"\", "); + } + sb.setLength(sb.length()-2); + sb.append(")) .\n"); + } + } + sb.append("}"); + + final String queryStr = sb.toString() + .replace("?from", s != null ? "<"+s+">" : "?from") + .replace("?to", o != null ? "<"+o+">" : "?to"); + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new EdgeIterable(stmts); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + public Iterable<Vertex> getVertices(final URI s, final URI o, + final String... labels) { + + if (s != null && o != null) { + throw new IllegalArgumentException(); + } + + if (s == null && o == null) { + throw new IllegalArgumentException(); + } + + try { + +// final RepositoryResult<Statement> result = +// cxn().getStatements(s, null, o, false); +// +// return new VertexIterable(result, s == null); + + final StringBuilder sb = new StringBuilder(); + sb.append("construct { ?from ?edge ?to . } where {\n"); + sb.append("?edge rdf:type bd:Edge . ?from ?edge ?to .\n"); + if (labels != null && labels.length > 0) { + if (labels.length == 1) { + sb.append("?edge rdfs:label \"").append(labels[0]).append("\" .\n"); + } else { + sb.append("?edge rdfs:label ?label .\n"); + sb.append("filter(?label in ("); + for (String label : labels) { + sb.append("\""+label+"\", "); + } + sb.setLength(sb.length()-2); + sb.append(")) .\n"); + } + } + sb.append("}"); + + final String queryStr = sb.toString() + .replace("?from", s != null ? "<"+s+">" : "?from") + .replace("?to", o != null ? "<"+o+">" : "?to"); + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new VertexIterable(stmts, s == null); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + public final <T> Iterable<T> fuse(final Iterable<T>... 
args) { + + return new FusedIterable<T>(args); + } + + + @Override + public Iterable<Edge> getEdges(final String prop, final Object val) { + + final URI p = factory.toPropertyURI(prop); + final Literal o = factory.toLiteral(val); + + try { + + final String queryStr = IOUtils.toString( + getClass().getResourceAsStream("edgesByProperty.rq")) + .replace("?prop", "<"+p+">") + .replace("?val", o.toString()); + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new EdgeIterable(stmts); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public Features getFeatures() { + + return FEATURES; + + } + + @Override + public Vertex getVertex(final Object key) { + + if (key == null) + throw new IllegalArgumentException(); + + final URI uri = factory.toVertexURI(key.toString()); + try { + if (cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { + return new BigdataVertex(uri, this); + } + return null; + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public Iterable<Vertex> getVertices() { + + try { + final RepositoryResult<Statement> result = + cxn().getStatements(null, RDF.TYPE, VERTEX, false); + return new VertexIterable(result, true); + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public Iterable<Vertex> getVertices(String prop, Object val) { + + final URI p = factory.toPropertyURI(prop); + final Literal o = factory.toLiteral(val); + try { + final RepositoryResult<Statement> result = + cxn().getStatements(null, p, o, false); + return new VertexIterable(result, true); + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + @Override + public GraphQuery query() { + return new DefaultGraphQuery(this); + } + + @Override + public void removeEdge(final Edge edge) { + try { + final URI uri = factory.toURI(edge); + if (!cxn().hasStatement(uri, RDF.TYPE, EDGE, false)) { + throw new IllegalStateException(); + } + final URI wild = null; + // remove the edge statement + cxn().remove(wild, uri, wild); + // remove its properties + cxn().remove(uri, wild, wild); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void removeVertex(final Vertex vertex) { + try { + final URI uri = factory.toURI(vertex); + if (!cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { + throw new IllegalStateException(); + } + final URI wild = null; + // remove outgoing links and properties + cxn().remove(uri, wild, wild); + // remove incoming links + cxn().remove(wild, wild, uri); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + +// @Override +// public void commit() { +// try { +// cxn().commit(); +// } catch (RepositoryException e) { +// throw new RuntimeException(e); +// } +// } +// +// @Override +// public void rollback() { +// try { +// cxn().rollback(); +// cxn.close(); +// cxn = repo.getUnisolatedConnection(); +// cxn.setAutoCommit(false); +// } catch (RepositoryException e) { +// throw new RuntimeException(e); +// } +// } +// +// @Override +// public void shutdown() { +// try { +// cxn.close(); +// repo.shutDown(); +// } catch (RepositoryException e) { +// throw new RuntimeException(e); +// } +// } +// +// @Override +// @Deprecated +// public void stopTransaction(Conclusion arg0) { +// } + + public class VertexIterable implements Iterable<Vertex>, Iterator<Vertex> { + + private final CloseableIteration<Statement, ? 
extends OpenRDFException> stmts; + + private final boolean subject; + + private final List<Vertex> cache; + + public VertexIterable( + final CloseableIteration<Statement, ? extends OpenRDFException> stmts, + final boolean subject) { + this.stmts = stmts; + this.subject = subject; + this.cache = new LinkedList<Vertex>(); + } + + @Override + public boolean hasNext() { + try { + return stmts.hasNext(); + } catch (OpenRDFException e) { + throw new RuntimeException(e); + } + } + + @Override + public Vertex next() { + try { + final Statement stmt = stmts.next(); + final URI v = (URI) + (subject ? stmt.getSubject() : stmt.getObject()); + if (!hasNext()) { + stmts.close(); + } + final Vertex vertex = new BigdataVertex(v, BigdataGraph.this); + cache.add(vertex); + return vertex; + } catch (OpenRDFException e) { + throw new RuntimeException(e); + } + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterator<Vertex> iterator() { + return hasNext() ? this : cache.iterator(); + } + + } + + public class EdgeIterable implements Iterable<Edge>, Iterator<Edge> { + + private final CloseableIteration<Statement, ? extends OpenRDFException> stmts; + + private final List<Edge> cache; + + public EdgeIterable( + final CloseableIteration<Statement, ? extends OpenRDFException> stmts) { + this.stmts = stmts; + this.cache = new LinkedList<Edge>(); + } + + @Override + public boolean hasNext() { + try { + return stmts.hasNext(); + } catch (OpenRDFException e) { + throw new RuntimeException(e); + } + } + + @Override + public Edge next() { + try { + final Statement stmt = stmts.next(); + if (!hasNext()) { + stmts.close(); + } + final Edge edge = new BigdataEdge(stmt, BigdataGraph.this); + cache.add(edge); + return edge; + } catch (OpenRDFException e) { + throw new RuntimeException(e); + } + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterator<Edge> iterator() { + return hasNext() ? this : cache.iterator(); + } + + } + + public class FusedIterable<T> implements Iterable<T>, Iterator<T> { + + private final Iterable<T>[] args; + + private transient int i = 0; + + private transient Iterator<T> curr; + + public FusedIterable(final Iterable<T>... 
args) { + this.args = args; + this.curr = args[0].iterator(); + } + + @Override + public boolean hasNext() { + if (curr.hasNext()) { + return true; + } + while (!curr.hasNext() && i < (args.length-1)) { + curr = args[++i].iterator(); + if (curr.hasNext()) { + return true; + } + } + return false; + } + + @Override + public T next() { + return curr.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterator<T> iterator() { + return this; + } + + } + + protected static final Features FEATURES = new Features(); + + static { + + FEATURES.supportsSerializableObjectProperty = false; + FEATURES.supportsBooleanProperty = true; + FEATURES.supportsDoubleProperty = true; + FEATURES.supportsFloatProperty = true; + FEATURES.supportsIntegerProperty = true; + FEATURES.supportsPrimitiveArrayProperty = false; + FEATURES.supportsUniformListProperty = false; + FEATURES.supportsMixedListProperty = false; + FEATURES.supportsLongProperty = true; + FEATURES.supportsMapProperty = false; + FEATURES.supportsStringProperty = true; + + FEATURES.supportsDuplicateEdges = true; + FEATURES.supportsSelfLoops = true; + FEATURES.isPersistent = true; + FEATURES.isWrapper = false; + FEATURES.supportsVertexIteration = true; + FEATURES.supportsEdgeIteration = true; + FEATURES.supportsVertexIndex = false; + FEATURES.supportsEdgeIndex = false; + FEATURES.ignoresSuppliedIds = true; + FEATURES.supportsTransactions = false; + FEATURES.supportsIndices = true; + FEATURES.supportsKeyIndices = true; + FEATURES.supportsVertexKeyIndex = true; + FEATURES.supportsEdgeKeyIndex = true; + FEATURES.supportsEdgeRetrieval = true; + FEATURES.supportsVertexProperties = true; + FEATURES.supportsEdgeProperties = true; + FEATURES.supportsThreadedTransactions = false; + } + +} Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -0,0 +1,325 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.UUID; + +import org.openrdf.model.Literal; +import org.openrdf.model.URI; +import org.openrdf.model.impl.StatementImpl; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.repository.RepositoryConnection; + +import com.bigdata.rdf.changesets.IChangeLog; +import com.bigdata.rdf.changesets.IChangeRecord; +import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.GraphQuery; +import com.tinkerpop.blueprints.TransactionalGraph; +import com.tinkerpop.blueprints.Vertex; + +/** + * Simple bulk loader that will insert graph data without any consistency + * checking (won't check for duplicate vertex or edge identifiers). Currently + * does not overwrite old property values, but we may need to change this. + * <p> + * Implements {@link IChangeLog} so that we can report a mutation count. + * + * @author mikepersonick + * + */ +public class BigdataGraphBulkLoad extends BigdataGraph + implements TransactionalGraph, IChangeLog { + + private final BigdataSailRepositoryConnection cxn; + + public BigdataGraphBulkLoad(final BigdataSailRepositoryConnection cxn) { + this(cxn, BigdataRDFFactory.INSTANCE); + } + + public BigdataGraphBulkLoad(final BigdataSailRepositoryConnection cxn, + final BlueprintsRDFFactory factory) { + super(factory); + + this.cxn = cxn; + this.cxn.addChangeLog(this); + } + + protected RepositoryConnection cxn() throws Exception { + return cxn; + } + + @Override + public void commit() { + try { + cxn.commit(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void rollback() { + try { + cxn.rollback(); + cxn.close(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void shutdown() { + try { + cxn.close(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + @Deprecated + public void stopTransaction(Conclusion arg0) { + } + + + static { + +// FEATURES.supportsSerializableObjectProperty = false; +// FEATURES.supportsBooleanProperty = true; +// FEATURES.supportsDoubleProperty = true; +// FEATURES.supportsFloatProperty = true; +// FEATURES.supportsIntegerProperty = true; +// FEATURES.supportsPrimitiveArrayProperty = false; +// FEATURES.supportsUniformListProperty = false; +// FEATURES.supportsMixedListProperty = false; +// FEATURES.supportsLongProperty = true; +// FEATURES.supportsMapProperty = false; +// FEATURES.supportsStringProperty = true; +// +// FEATURES.supportsDuplicateEdges = true; +// FEATURES.supportsSelfLoops = true; +// FEATURES.isPersistent = true; +// FEATURES.isWrapper = false; +// FEATURES.supportsVertexIteration = true; +// FEATURES.supportsEdgeIteration = true; +// FEATURES.supportsVertexIndex = false; +// FEATURES.supportsEdgeIndex = false; +// FEATURES.ignoresSuppliedIds = true; + BigdataGraph.FEATURES.supportsTransactions = true; +// FEATURES.supportsIndices = true; +// FEATURES.supportsKeyIndices = true; +// FEATURES.supportsVertexKeyIndex = true; +// FEATURES.supportsEdgeKeyIndex = true; +// FEATURES.supportsEdgeRetrieval = true; +// FEATURES.supportsVertexProperties = true; +// FEATURES.supportsEdgeProperties = true; +// FEATURES.supportsThreadedTransactions = false; + } + + + 
@Override + public Edge getEdge(Object arg0) { + throw new UnsupportedOperationException(); + } + + @Override + public Iterable<Edge> getEdges() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterable<Edge> getEdges(String arg0, Object arg1) { + throw new UnsupportedOperationException(); + } + + @Override + public Vertex getVertex(Object arg0) { + throw new UnsupportedOperationException(); + } + + @Override + public Iterable<Vertex> getVertices() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterable<Vertex> getVertices(String arg0, Object arg1) { + throw new UnsupportedOperationException(); + } + + @Override + public GraphQuery query() { + throw new UnsupportedOperationException(); + } + + @Override + public void removeEdge(Edge arg0) { + throw new UnsupportedOperationException(); + } + + @Override + public void removeVertex(Vertex arg0) { + throw new UnsupportedOperationException(); + } + + /** + * Set a property without removing the old value first. + */ + @Override + public void setProperty(final URI s, final URI p, final Literal o) { + + try { + +// cxn().remove(s, p, null); + + cxn().add(s, p, o); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Add a vertex without consistency checking (does not check for a duplicate + * identifier). + */ + @Override + public Vertex addVertex(final Object key) { + + try { + + final String vid = key != null ? + key.toString() : UUID.randomUUID().toString(); + + final URI uri = factory.toVertexURI(vid); + +// if (cxn().hasStatement(vertexURI, RDF.TYPE, VERTEX, false)) { +// throw new IllegalArgumentException("vertex " + vid + " already exists"); +// } + + cxn().add(uri, RDF.TYPE, VERTEX); + + return new BigdataVertex(uri, this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Add an edge without consistency checking (does not check for a duplicate + * identifier). + */ + @Override + public Edge addEdge(final Object key, final Vertex from, final Vertex to, + final String label) { + + if (label == null) { + throw new IllegalArgumentException(); + } + + final String eid = key != null ? 
key.toString() : UUID.randomUUID().toString(); + + final URI edgeURI = factory.toEdgeURI(eid); + +// if (key != null) { +// +// final Edge edge = getEdge(key); +// +// if (edge != null) { +// if (!(edge.getVertex(Direction.OUT).equals(from) && +// (edge.getVertex(Direction.OUT).equals(to)))) { +// throw new IllegalArgumentException("edge already exists: " + key); +// } +// } +// +// } + + try { + +// if (cxn().hasStatement(edgeURI, RDF.TYPE, EDGE, false)) { +// throw new IllegalArgumentException("edge " + eid + " already exists"); +// } + + final URI fromURI = factory.toVertexURI(from.getId().toString()); + final URI toURI = factory.toVertexURI(to.getId().toString()); + + cxn().add(fromURI, edgeURI, toURI); + cxn().add(edgeURI, RDF.TYPE, EDGE); + cxn().add(edgeURI, RDFS.LABEL, factory.toLiteral(label)); + + return new BigdataEdge(new StatementImpl(fromURI, edgeURI, toURI), this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + private transient long mutationCountTotal = 0; + private transient long mutationCountCurrentCommit = 0; + private transient long mutationCountLastCommit = 0; + + @Override + public void changeEvent(final IChangeRecord record) { + mutationCountTotal++; + mutationCountCurrentCommit++; + } + + @Override + public void transactionBegin() { + } + + @Override + public void transactionPrepare() { + } + + @Override + public void transactionCommited(long commitTime) { + mutationCountLastCommit = mutationCountCurrentCommit; + mutationCountCurrentCommit = 0; + } + + @Override + public void transactionAborted() { + } + + public long getMutationCountTotal() { + return mutationCountTotal; + } + + public long getMutationCountCurrentCommit() { + return mutationCountCurrentCommit; + } + + public long getMutationCountLastCommit() { + return mutationCountLastCommit; + } + + + +} Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java 2014-05-07 20:24:56 UTC (rev 8223) @@ -0,0 +1,112 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import com.bigdata.rdf.sail.remote.BigdataSailRemoteRepository; +import com.bigdata.rdf.sail.remote.BigdataSailRemoteRepositoryConnection; +import com.bigdata.rdf.sail.webapp.client.RemoteRepository; + +/** + * This is a thin-client implementation of a Blueprints wrapper around the + * client library that interacts with the NanoSparqlServer. This is a functional + * implementation suitable for writing POCs... [truncated message content] |
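A quick sketch of the new bulk-load API may be useful here, using only the methods shown in the diff (addVertex, addEdge, commit, and the mutation counters). How the BigdataSailRepository is opened is left out, and the getUnisolatedConnection() accessor is assumed from the commented-out connection handling in BigdataGraph above; treat this as a sketch, not the project's documented usage.

import com.bigdata.blueprints.BigdataGraphBulkLoad;
import com.bigdata.rdf.sail.BigdataSailRepository;
import com.bigdata.rdf.sail.BigdataSailRepositoryConnection;
import com.tinkerpop.blueprints.Vertex;

public class BulkLoadSketch {

    /**
     * Load two sample vertices and one edge through the bulk-load API.
     */
    public static void loadSample(final BigdataSailRepository repo) throws Exception {

        // Unisolated write connection -- accessor assumed from the commented-out
        // connection handling in BigdataGraph above.
        final BigdataSailRepositoryConnection cxn = repo.getUnisolatedConnection();
        try {

            // The bulk loader registers itself as an IChangeLog listener on the connection.
            final BigdataGraphBulkLoad graph = new BigdataGraphBulkLoad(cxn);

            // No duplicate-identifier checks: these calls write statements straight through.
            final Vertex a = graph.addVertex("a");
            final Vertex b = graph.addVertex("b");
            graph.addEdge("a-knows-b", a, b, "knows");

            graph.commit();

            // Counter maintained by the changeEvent()/transactionCommited() callbacks.
            System.out.println("mutations in last commit: "
                    + graph.getMutationCountLastCommit());

        } finally {
            cxn.close();
        }
    }
}

Because the loader skips the duplicate-identifier checks (they survive only in the commented-out blocks above), it is appropriate for input that is known to be clean; re-loading the same keys simply re-asserts the same statements rather than failing.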
From: <jer...@us...> - 2014-05-07 15:57:56
|
Revision: 8222 http://sourceforge.net/p/bigdata/code/8222 Author: jeremy_carroll Date: 2014-05-07 15:57:53 +0000 (Wed, 07 May 2014) Log Message: ----------- removed unnecessary UTF-8 encoding pref Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java Removed Paths: ------------- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs Deleted: branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs 2014-05-07 15:39:17 UTC (rev 8221) +++ branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs 2014-05-07 15:57:53 UTC (rev 8222) @@ -1,2 +0,0 @@ -eclipse.preferences.version=1 -encoding//bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java=UTF-8 Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-07 15:39:17 UTC (rev 8221) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-07 15:57:53 UTC (rev 8222) @@ -544,7 +544,7 @@ final Class<? extends Analyzer> cls = getAnalyzerClass(); if (hasConstructor(cls, Version.class, Set.class)) { - + // RussianAnalyzer is missing any way to access stop words. if (RussianAnalyzer.class.equals(cls) && useDefaultStopWords()) { return new AnalyzerPair(languageRange, new RussianAnalyzer(Version.LUCENE_CURRENT, Collections.EMPTY_SET), new RussianAnalyzer(Version.LUCENE_CURRENT)); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jer...@us...> - 2014-05-07 15:39:20
|
Revision: 8221 http://sourceforge.net/p/bigdata/code/8221 Author: jeremy_carroll Date: 2014-05-07 15:39:17 +0000 (Wed, 07 May 2014) Log Message: ----------- Initial version of ConfigurableAnalyzerFactory to address trac 912 Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/.settings/ branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/EmptyAnalyzer.java Added: branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/.settings/org.eclipse.core.resources.prefs 2014-05-07 15:39:17 UTC (rev 8221) @@ -0,0 +1,2 @@ +eclipse.preferences.version=1 +encoding//bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java=UTF-8 Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/ConfigurableAnalyzerFactory.java 2014-05-07 15:39:17 UTC (rev 8221) @@ -0,0 +1,805 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 6, 2014 by Jeremy J. Carroll, Syapse Inc. + */ +package com.bigdata.search; + +import java.io.IOException; +import java.io.StringReader; +import java.lang.reflect.Constructor; +import java.util.Arrays; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Pattern; + +import org.apache.log4j.Logger; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.KeywordAnalyzer; +import org.apache.lucene.analysis.SimpleAnalyzer; +import org.apache.lucene.analysis.StopAnalyzer; +import org.apache.lucene.analysis.WhitespaceAnalyzer; +import org.apache.lucene.analysis.miscellaneous.PatternAnalyzer; +import org.apache.lucene.analysis.ru.RussianAnalyzer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.util.Version; + +import com.bigdata.btree.keys.IKeyBuilder; +import com.bigdata.btree.keys.KeyBuilder; + +/** + * This class can be used with the bigdata properties file to specify + * which {@link Analyzer}s are used for which languages. 
+ * Languages are specified by the language tag on RDF literals, which conform + * with <a href="http://www.rfc-editor.org/rfc/rfc5646.txt">RFC 5646</a>. + * Within bigdata plain literals are assigned to the default locale's language. + * + * The bigdata properties are used to map language ranges, as specified by + * <a href="http://www.rfc-editor.org/rfc/rfc4647.txt">RFC 4647</a> to classes which extend {@link Analyzer}. + * Supported classes included all the natural language specific classes from Lucene, and also: + * <ul> + * <li>{@link PatternAnalyzer} + * <li>{@link KeywordAnalyzer} + * <li>{@link SimpleAnalyzer} + * <li>{@link StopAnalyzer} + * <li>{@link WhitespaceAnalyzer} + * <li>{@link StandardAnalyzer} + * </ul> + * More generally any subclass of {@link Analyzer} that has at least one constructor matching: + * <ul> + * <li>no arguments + * <li>{@link Version} + * <li>{@link Set} (of strings, the stop words) + * <li>{@link Version}, {@link Set} + * </ul> + * is usable. If the class has a static method named <code>getDefaultStopSet()</code> then this is assumed + * to do what it says on the can; some of the Lucene analyzers store their default stop words elsewhere, + * and such stopwords are usable by this class. If no stop word set can be found, and there is a constructor without + * stopwords and a constructor with stopwords, then the former is assumed to use a default stop word set. + * <p> + * Configuration is by means of the bigdata properties file. + * All relevant properties start <code>com.bigdata.search.ConfigurableAnalyzerFactory</code> which we + * abbreviate to <code>c.b.s.C</code> in this documentation. + * Properties from {@link Options} apply to the factory. + * <p> + * + * If there are no such properties at all then the property {@link Options#INCLUDE_DEFAULTS} is set to true, + * and the behavior of this class is the same as the legacy {@link DefaultAnalyzerFactory}. + * <p> + * Other properties, from {@link AnalyzerOptions} start with + * <code>c.b.s.C.analyzer.<em>language-range</em></code> where <code><em>language-range</em></code> conforms + * with the extended language range construct from RFC 4647, section 2.2. These are used to specify + * an analyzer for the given language range. + * <p> + * If no analyzer is specified for the language range <code>*</code> then the {@link StandardAnalyzer} is used. + * <p> + * Given any specific language, then the analyzer matching the longest configured language range, + * measured in number of subtags is used {@link #getAnalyzer(String, boolean)} + * In the event of a tie, the alphabetically first language range is used. + * The algorithm to find a match is "Extended Filtering" as defined in section 3.3.2 of RFC 4647. 
+ * <p> + * Some useful analyzers are as follows: + * <dl> + * <dt>{@link KeywordAnalyzer}</dt> + * <dd>This treats every lexical value as a single search token</dd> + * <dt>{@link WhitespaceAnalyzer}</dt> + * <dd>This uses whitespace to tokenize</dd> + * <dt>{@link PatternAnalyzer}</dt> + * <dd>This uses a regular expression to tokenize</dd> + * <dt>{@link EmptyAnalyzer}</dt> + * <dd>This suppresses the functionality, by treating every expression as a stop word.</dd> + * </dl> + * there are in addition the language specific analyzers that are included + * by using the option {@link Options#INCLUDE_DEFAULTS} + * + * + * @author jeremycarroll + * + */ +public class ConfigurableAnalyzerFactory implements IAnalyzerFactory { + final private static transient Logger log = Logger.getLogger(ConfigurableAnalyzerFactory.class); + + private static class LanguageRange implements Comparable<LanguageRange> { + + private final String range[]; + private final String full; + + public LanguageRange(String range) { + this.range = range.split("-"); + full = range; + } + + @Override + public int compareTo(LanguageRange o) { + if (equals(o)) { + return 0; + } + int diff = o.range.length - range.length; + if (diff != 0) { + // longest first + return diff; + } + if (range.length == 1) { + // * last + if (range[0].equals("*")) { + return 1; + } + if (o.range[0].equals("*")) { + return -1; + } + } + // alphabetically + for (int i=0; i<range.length; i++) { + diff = range[i].compareTo(o.range[i]); + if (diff != 0) { + return diff; + } + } + throw new RuntimeException("Impossible - supposedly"); + } + + @Override + public boolean equals(Object o) { + return (o instanceof LanguageRange) && ((LanguageRange)o).full.equals(full); + } + @Override + public int hashCode() { + return full.hashCode(); + } + + // See RFC 4647, 3.3.2 + public boolean extendedFilterMatch(String[] language) { + // RFC 4647 step 2 + if (!matchSubTag(language[0], range[0])) { + return false; + } + int rPos = 1; + int lPos = 1; + // variant step - for private use flags + if (language[0].equals("x") && range[0].equals("*")) { + lPos = 0; + } + // RFC 4647 step 3 + while (rPos < range.length) { + // step 3A + if (range[rPos].equals("*")) { + rPos ++; + continue; + } + // step 3B + if (lPos >= language.length) { + return false; + } + // step 3C + if (matchSubTag(language[lPos], range[rPos])) { + lPos++; + rPos++; + continue; + } + if (language[lPos].length()==1) { + return false; + } + lPos++; + } + // RFC 4647 step 4 + return true; + } + + // RFC 4647, 3.3.2, step 1 + private boolean matchSubTag(String langSubTag, String rangeSubTag) { + return langSubTag.equals(rangeSubTag) || "*".equals(rangeSubTag); + } + + } + /** + * Options understood by the {@link ConfigurableAnalyzerFactory}. + */ + public interface Options { + /** + * By setting this option to true, then the behavior of the legacy {@link DefaultAnalyzerFactory} + * is added, and may be overridden by the settings of the user. 
+ * Specifically the following properties are loaded, prior to loading the + * user's specification (with <code>c.b.s.C</code> expanding to + * <code>com.bigdata.search.ConfigurableAnalyzerFactory</code>) +<pre> +c.b.s.C.analyzer.*.like=eng +c.b.s.C.analyzer.por.analyzerClass=org.apache.lucene.analysis.br.BrazilianAnalyzer +c.b.s.C.analyzer.pt.like=por +c.b.s.C.analyzer.zho.analyzerClass=org.apache.lucene.analysis.cn.ChineseAnalyzer +c.b.s.C.analyzer.chi.like=zho +c.b.s.C.analyzer.zh.like=zho +c.b.s.C.analyzer.jpn.analyzerClass=org.apache.lucene.analysis.cjk.CJKAnalyzer +c.b.s.C.analyzer.ja.like=jpn +c.b.s.C.analyzer.kor.like=jpn +c.b.s.C.analyzer.ko.like=kor +c.b.s.C.analyzer.ces.analyzerClass=org.apache.lucene.analysis.cz.CzechAnalyzer +c.b.s.C.analyzer.cze.like=ces +c.b.s.C.analyzer.cs.like=ces +c.b.s.C.analyzer.dut.analyzerClass=org.apache.lucene.analysis.nl.DutchAnalyzer +c.b.s.C.analyzer.nld.like=dut +c.b.s.C.analyzer.nl.like=dut +c.b.s.C.analyzer.deu.analyzerClass=org.apache.lucene.analysis.de.GermanAnalyzer +c.b.s.C.analyzer.ger.like=deu +c.b.s.C.analyzer.de.like=deu +c.b.s.C.analyzer.gre.analyzerClass=org.apache.lucene.analysis.el.GreekAnalyzer +c.b.s.C.analyzer.ell.like=gre +c.b.s.C.analyzer.el.like=gre +c.b.s.C.analyzer.rus.analyzerClass=org.apache.lucene.analysis.ru.RussianAnalyzer +c.b.s.C.analyzer.ru.like=rus +c.b.s.C.analyzer.tha.analyzerClass=org.apache.lucene.analysis.th.ThaiAnalyzer +c.b.s.C.analyzer.th.like=tha +c.b.s.C.analyzer.eng.analyzerClass=org.apache.lucene.analysis.standard.StandardAnalyzer +c.b.s.C.analyzer.en.like=eng +</pre> + * + * + */ + String INCLUDE_DEFAULTS = ConfigurableAnalyzerFactory.class.getName() + ".includeDefaults"; + /** + * This is the prefix to all properties configuring the individual analyzers. + */ + String ANALYZER = ConfigurableAnalyzerFactory.class.getName() + ".analyzer."; +/** + * If there is no configuration at all, then the defaults are included, + * but any configuration at all totally replaces the defaults, unless + * {@link #INCLUDE_DEFAULTS} + * is explicitly set to true. + */ + String DEFAULT_INCLUDE_DEFAULTS = "false"; + } + /** + * Options understood by analyzers created by {@link ConfigurableAnalyzerFactory}. + * These options are appended to the RFC 4647 language range + */ + public interface AnalyzerOptions { + /** + * If specified this is the fully qualified name of a subclass of {@link Analyzer} + * that has appropriate constructors. + * Either this or {@link #LIKE} or {@link #PATTERN} must be specified for each language range. + */ + String ANALYZER_CLASS = "analyzerClass"; + + /** + * The value of this property is a language range, for which + * an analyzer is defined. + * Treat this language range in the same way as the specified + * language range. + * + * {@link #LIKE} loops are not permitted. + * + * If this is option is specified for a language range, + * then no other option is permitted. + */ + String LIKE = "like"; + + /** + * The value of this property is one of: + * <dl> + * <dt>{@link #STOPWORDS_VALUE_NONE}</dt> + * <dd>This analyzer is used without stop words.</dd> + * <dt>{@link #STOPWORDS_VALUE_DEFAULT}</dt> + * <dd>Use the default setting for stopwords for this analyzer. It is an error + * to set this value on some analyzers such as {@link SimpleAnalyzer} that do not supprt stop words. + * </dd> + * <dt>A fully qualified class name</dt> + * <dd>... of a subclass of {@link Analyzer} which + * has a static method <code>getDefaultStopSet()</code>, in which case, the returned set of stop words is used. 
+ * </dd> + * </dl> + * If the {@link #ANALYZER_CLASS} does not support stop words then any value other than {@link #STOPWORDS_VALUE_NONE} is an error. + * If the {@link #ANALYZER_CLASS} does support stop words then the default value is {@link #STOPWORDS_VALUE_DEFAULT} + */ + String STOPWORDS = "stopwords"; + + String STOPWORDS_VALUE_DEFAULT = "default"; + + String STOPWORDS_VALUE_NONE = "none"; + /** + * If this property is present then the analyzer being used is a + * {@link PatternAnalyzer} and the value is the pattern to use. + * (Note the {@link Pattern#UNICODE_CHARACTER_CLASS} flag is enabled). + * It is an error if a different analyzer class is specified. + */ + String PATTERN = ".pattern"; + + } + + private static final String DEFAULT_PROPERTIES = + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.*.like=eng\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.por.analyzerClass=org.apache.lucene.analysis.br.BrazilianAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.pt.like=por\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.zho.analyzerClass=org.apache.lucene.analysis.cn.ChineseAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.chi.like=zho\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.zh.like=zho\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.jpn.analyzerClass=org.apache.lucene.analysis.cjk.CJKAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.ja.like=jpn\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.kor.like=jpn\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.ko.like=kor\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.ces.analyzerClass=org.apache.lucene.analysis.cz.CzechAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.cze.like=ces\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.cs.like=ces\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.dut.analyzerClass=org.apache.lucene.analysis.nl.DutchAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.nld.like=dut\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.nl.like=dut\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.deu.analyzerClass=org.apache.lucene.analysis.de.GermanAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.ger.like=deu\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.de.like=deu\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.gre.analyzerClass=org.apache.lucene.analysis.el.GreekAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.ell.like=gre\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.el.like=gre\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.rus.analyzerClass=org.apache.lucene.analysis.ru.RussianAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.ru.like=rus\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.tha.analyzerClass=org.apache.lucene.analysis.th.ThaiAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.th.like=tha\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.eng.analyzerClass=org.apache.lucene.analysis.standard.StandardAnalyzer\n" + + "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.en.like=eng\n"; + + private static class AnalyzerPair implements Comparable<AnalyzerPair>{ + private final LanguageRange range; + private final Analyzer 
withStopWords; + private final Analyzer withoutStopWords; + + AnalyzerPair(String range, Analyzer withStopWords, Analyzer withOutStopWords) { + this.range = new LanguageRange(range); + this.withStopWords = withStopWords; + this.withoutStopWords = withOutStopWords; + } + + AnalyzerPair(String range, AnalyzerPair copyMe) { + this.range = new LanguageRange(range); + this.withStopWords = copyMe.withStopWords; + this.withoutStopWords = copyMe.withoutStopWords; + + } + + public Analyzer getAnalyzer(boolean filterStopwords) { + return filterStopwords ? withStopWords : withoutStopWords; + } + @Override + public String toString() { + return range.full + "=(" + withStopWords.getClass().getSimpleName() +")"; + } + + + AnalyzerPair(String range, Constructor<? extends Analyzer> cons, Object ... params) throws Exception { + this(range, cons.newInstance(params), cons.newInstance(useEmptyStopWordSet(params))); + } + AnalyzerPair(String range, Analyzer stopWordsNotSupported) { + this(range, stopWordsNotSupported, stopWordsNotSupported); + } + private static Object[] useEmptyStopWordSet(Object[] params) { + Object rslt[] = new Object[params.length]; + for (int i=0; i<params.length; i++) { + if (params[i] instanceof Set) { + rslt[i] = Collections.EMPTY_SET; + } else { + rslt[i] = params[i]; + } + } + return rslt; + } + @Override + public int compareTo(AnalyzerPair o) { + return range.compareTo(o.range); + } + + public boolean extendedFilterMatch(String[] language) { + return range.extendedFilterMatch(language); + } + } + + + private static class VersionSetAnalyzerPair extends AnalyzerPair { + public VersionSetAnalyzerPair(ConfigOptionsToAnalyzer lro, + Class<? extends Analyzer> cls) throws Exception { + super(lro.languageRange, getConstructor(cls, Version.class, Set.class), Version.LUCENE_CURRENT, lro.getStopWords()); + } + } + + private static class VersionAnalyzerPair extends AnalyzerPair { + + public VersionAnalyzerPair(String range, Class<? extends Analyzer> cls) throws Exception { + super(range, getConstructor(cls, Version.class).newInstance(Version.LUCENE_CURRENT)); + } + } + + + private static class PatternAnalyzerPair extends AnalyzerPair { + + public PatternAnalyzerPair(ConfigOptionsToAnalyzer lro, String pattern) throws Exception { + super(lro.languageRange, getConstructor(PatternAnalyzer.class,Version.class,Pattern.class,Boolean.TYPE,Set.class), + Version.LUCENE_CURRENT, + Pattern.compile(pattern, Pattern.UNICODE_CHARACTER_CLASS), + true, + lro.getStopWords()); + } + } + + + /** + * This class is initialized with the config options, using the {@link #setProperty(String, String)} + * method, for a particular language range and works out which pair of {@link Analyzer}s + * to use for that language range. + * @author jeremycarroll + * + */ + private static class ConfigOptionsToAnalyzer { + + String like; + String className; + String stopwords; + String pattern; + final String languageRange; + AnalyzerPair result; + + public ConfigOptionsToAnalyzer(String languageRange) { + this.languageRange = languageRange; + } + + /** + * This is called only when we have already identified that + * the class does support stopwords. + * @return + */ + public Set<?> getStopWords() { + + if (AnalyzerOptions.STOPWORDS_VALUE_NONE.equals(stopwords)) + return Collections.EMPTY_SET; + + if (useDefaultStopWords()) { + return getStopWordsForClass(className); + } + + return getStopWordsForClass(stopwords); + } + + protected Set<?> getStopWordsForClass(String clazzName) { + Class<? 
extends Analyzer> analyzerClass = getAnalyzerClass(clazzName); + try { + return (Set<?>) analyzerClass.getMethod("getDefaultStopSet").invoke(null); + } catch (Exception e) { + if (StandardAnalyzer.class.equals(analyzerClass)) { + return StandardAnalyzer.STOP_WORDS_SET; + } + if (StopAnalyzer.class.equals(analyzerClass)) { + return StopAnalyzer.ENGLISH_STOP_WORDS_SET; + } + throw new RuntimeException("Failed to find stop words from " + clazzName + " for language range "+languageRange); + } + } + + protected boolean useDefaultStopWords() { + return stopwords == null || AnalyzerOptions.STOPWORDS_VALUE_DEFAULT.equals(stopwords); + } + + public boolean setProperty(String shortProperty, String value) { + if (shortProperty.equals(AnalyzerOptions.LIKE) ) { + like = value; + } else if (shortProperty.equals(AnalyzerOptions.ANALYZER_CLASS) ) { + className = value; + } else if (shortProperty.equals(AnalyzerOptions.STOPWORDS) ) { + stopwords = value; + } else if (shortProperty.equals(AnalyzerOptions.PATTERN) ) { + pattern = value; + } else { + return false; + } + return true; + } + + public void validate() { + if (pattern != null ) { + if ( className != null && className != PatternAnalyzer.class.getName()) { + throw new RuntimeException("Bad Option: Language range "+languageRange + " with pattern propety for class "+ className); + } + className = PatternAnalyzer.class.getName(); + } + if (PatternAnalyzer.class.getName().equals(className) && pattern == null ) { + throw new RuntimeException("Bad Option: Language range "+languageRange + " must specify pattern for PatternAnalyzer."); + } + if ( (like != null) == (className != null) ) { + throw new RuntimeException("Bad Option: Language range "+languageRange + " must specify exactly one of implementation class or like."); + } + if (stopwords != null && like != null) { + throw new RuntimeException("Bad Option: Language range "+languageRange + " must not specify stopwords with like."); + } + + } + + private AnalyzerPair construct() throws Exception { + if (className == null) { + return null; + } + if (pattern != null) { + return new PatternAnalyzerPair(this, pattern); + + } + final Class<? extends Analyzer> cls = getAnalyzerClass(); + + if (hasConstructor(cls, Version.class, Set.class)) { + + // RussianAnalyzer is missing any way to access stop words. + if (RussianAnalyzer.class.equals(cls) && useDefaultStopWords()) { + return new AnalyzerPair(languageRange, new RussianAnalyzer(Version.LUCENE_CURRENT, Collections.EMPTY_SET), new RussianAnalyzer(Version.LUCENE_CURRENT)); + } + return new VersionSetAnalyzerPair(this, cls); + } + + if (stopwords != null && !stopwords.equals(AnalyzerOptions.STOPWORDS_VALUE_NONE)) { + throw new RuntimeException("Bad option: language range: " + languageRange + " stopwords are not supported by " + className); + } + if (hasConstructor(cls, Version.class)) { + return new VersionAnalyzerPair(languageRange, cls); + } + + if (hasConstructor(cls)) { + return new AnalyzerPair(languageRange, cls.newInstance()); + } + throw new RuntimeException("Bad option: cannot find constructor for class " + className + " for language range " + languageRange); + } + + protected Class<? extends Analyzer> getAnalyzerClass() { + return getAnalyzerClass(className); + } + + @SuppressWarnings("unchecked") + protected Class<? extends Analyzer> getAnalyzerClass(String className2) { + final Class<? extends Analyzer> cls; + try { + cls = (Class<? 
extends Analyzer>) Class.forName(className2); + } catch (ClassNotFoundException e) { + throw new RuntimeException("Bad option: cannot find class " + className2 + " for language range " + languageRange, e); + } + return cls; + } + + void setAnalyzerPair(AnalyzerPair ap) { + result = ap; + } + + AnalyzerPair followLikesToAnalyzerPair(int depth, int max, + Map<String, ConfigOptionsToAnalyzer> analyzers) { + if (result == null) { + if (depth == max) { + throw new RuntimeException("Bad configuration: - 'like' loop for language range " + languageRange); + } + ConfigOptionsToAnalyzer next = analyzers.get(like); + if (next == null) { + throw new RuntimeException("Bad option: - 'like' not found for language range " + languageRange+ " (not found: '"+ like +"')"); + } + result = new AnalyzerPair(languageRange, next.followLikesToAnalyzerPair(depth+1, max, analyzers)); + } + return result; + } + + } + + private final AnalyzerPair config[]; + + private final Map<String, AnalyzerPair> langTag2AnalyzerPair = new ConcurrentHashMap<String, AnalyzerPair>(); + + /** + * While it would be very unusual to have more than 500 different language tags in a store + * it is possible - we use a max size to prevent a memory explosion, and a naive caching + * strategy so the code will still work on the {@link #MAX_LANG_CACHE_SIZE}+1 th entry. + */ + private static final int MAX_LANG_CACHE_SIZE = 500; + + private final String defaultLanguage; + + + public ConfigurableAnalyzerFactory(final FullTextIndex<?> fullTextIndex) { + // despite our name, we actually make all the analyzers now, and getAnalyzer method is merely a lookup. + + if (fullTextIndex == null) + throw new IllegalArgumentException(); + + defaultLanguage = getDefaultLanguage(fullTextIndex); + + final Properties properties = initProperties(fullTextIndex); + + final Map<String, ConfigOptionsToAnalyzer> analyzers = new HashMap<String, ConfigOptionsToAnalyzer>(); + + properties2analyzers(properties, analyzers); + + if (!analyzers.containsKey("*")) { + throw new RuntimeException("Bad config: must specify behavior on language range '*'"); + } + + for (ConfigOptionsToAnalyzer a: analyzers.values()) { + a.validate(); + } + + try { + for (ConfigOptionsToAnalyzer a: analyzers.values()) { + a.setAnalyzerPair(a.construct()); + } + } catch (Exception e) { + throw new RuntimeException("Cannot construct ConfigurableAnalyzerFactory", e); + } + int sz = analyzers.size(); + for (ConfigOptionsToAnalyzer a: analyzers.values()) { + a.followLikesToAnalyzerPair(0, sz, analyzers); + } + + config = new AnalyzerPair[sz]; + int i = 0; + for (ConfigOptionsToAnalyzer a: analyzers.values()) { + config[i++] = a.result; + } + Arrays.sort(config); + if (log.isInfoEnabled()) { + StringBuilder sb = new StringBuilder(); + sb.append("Installed text Analyzer's: "); + for (AnalyzerPair ap: config) { + sb.append(ap.toString()); + sb.append(", "); + } + log.info(sb.toString()); + } + } + + private String getDefaultLanguage(final FullTextIndex<?> fullTextIndex) { + + final IKeyBuilder keyBuilder = fullTextIndex.getKeyBuilder(); + + + if (keyBuilder.isUnicodeSupported()) { + + // The configured local for the database. + final Locale locale = ((KeyBuilder) keyBuilder) + .getSortKeyGenerator().getLocale(); + + // The analyzer for that locale. + return locale.getLanguage(); + + } else { + // Rule, Britannia! + return "en"; + + } + } + + private static boolean hasConstructor(Class<? extends Analyzer> cls, Class<?> ... 
parameterTypes) { + return getConstructor(cls, parameterTypes) != null; + } + + protected static Constructor<? extends Analyzer> getConstructor(Class<? extends Analyzer> cls, + Class<?>... parameterTypes) { + try { + return cls.getConstructor(parameterTypes); + } catch (NoSuchMethodException | SecurityException e) { + return null; + } + } + + private void properties2analyzers(Properties props, Map<String, ConfigOptionsToAnalyzer> analyzers) { + + Enumeration<?> en = props.propertyNames(); + while (en.hasMoreElements()) { + + String prop = (String)en.nextElement(); + if (prop.equals(Options.INCLUDE_DEFAULTS)) continue; + if (prop.startsWith(Options.ANALYZER)) { + String languageRangeAndProperty[] = prop.substring(Options.ANALYZER.length()).split("[.]"); + if (languageRangeAndProperty.length == 2) { + + String languageRange = languageRangeAndProperty[0].toLowerCase(Locale.US); // Turkish "I" could create a problem + String shortProperty = languageRangeAndProperty[1]; + String value = props.getProperty(prop); + log.info("Setting language range: " + languageRange + "/" + shortProperty + " = " + value); + ConfigOptionsToAnalyzer cons = analyzers.get(languageRange); + if (cons == null) { + cons = new ConfigOptionsToAnalyzer(languageRange); + analyzers.put(languageRange, cons); + } + if (cons.setProperty(shortProperty, value)) { + continue; + } + } + } + + log.warn("Failed to process configuration property: " + prop); + } + + } + + protected Properties initProperties(final FullTextIndex<?> fullTextIndex) { + final Properties parentProperties = fullTextIndex.getProperties(); + Properties myProps; + if (Boolean.getBoolean(parentProperties.getProperty(Options.INCLUDE_DEFAULTS, Options.DEFAULT_INCLUDE_DEFAULTS))) { + myProps = defaultProperties(); + } else { + myProps = new Properties(); + } + + copyRelevantProperties(fullTextIndex.getProperties(), myProps); + + if (myProps.isEmpty()) { + return defaultProperties(); + } else { + return myProps; + } + } + + protected Properties defaultProperties() { + Properties rslt = new Properties(); + try { + rslt.load(new StringReader(DEFAULT_PROPERTIES)); + } catch (IOException e) { + throw new RuntimeException("Impossible - well clearly not!", e); + } + return rslt; + } + + private void copyRelevantProperties(Properties from, Properties to) { + Enumeration<?> en = from.propertyNames(); + while (en.hasMoreElements()) { + String prop = (String)en.nextElement(); + if (prop.startsWith(ConfigurableAnalyzerFactory.class.getName())) { + to.setProperty(prop, from.getProperty(prop)); + } + } + } + + @Override + public Analyzer getAnalyzer(String languageCode, boolean filterStopwords) { + + if (languageCode == null || languageCode.equals("")) { + languageCode = defaultLanguage; + } + + AnalyzerPair pair = langTag2AnalyzerPair.get(languageCode); + + if (pair == null) { + pair = lookupPair(languageCode); + + // naive cache - clear everything if cache is full + if (langTag2AnalyzerPair.size() == MAX_LANG_CACHE_SIZE) { + langTag2AnalyzerPair.clear(); + } + // there is a race condition below, but we don't care who wins. 
+ langTag2AnalyzerPair.put(languageCode, pair); + } + + return pair.getAnalyzer(filterStopwords); + + } + + private AnalyzerPair lookupPair(String languageCode) { + String language[] = languageCode.split("-"); + for (AnalyzerPair p: config) { + if (p.extendedFilterMatch(language)) { + return p; + } + } + throw new RuntimeException("Impossible - supposedly - did not match '*'"); + } +} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/EmptyAnalyzer.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/EmptyAnalyzer.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/search/EmptyAnalyzer.java 2014-05-07 15:39:17 UTC (rev 8221) @@ -0,0 +1,49 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* + * Created on May 6, 2014 by Jeremy J. Carroll, Syapse Inc. + */ +package com.bigdata.search; + +import java.io.Reader; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; + +/** + * An analyzer that always returns an {@link EmptyTokenStream}, this can + * be used with {@link ConfigurableAnalyzerFactory} + * to switch off indexing and searching for specific language tags. + * @author jeremycarroll + * + */ +public class EmptyAnalyzer extends Analyzer { + + @Override + public TokenStream tokenStream(String arg0, Reader arg1) { + return new EmptyTokenStream(); + } + +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
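The class comment above spells out the property scheme in detail; a compact end-to-end configuration may still help. The sketch below builds such a configuration programmatically. The particular language ranges, the GermanAnalyzer mapping, and the x-internal private-use tag are illustrative assumptions (the analyzer class names themselves are taken from the defaults listed above), and wiring the resulting Properties into a FullTextIndex is not shown.

import java.util.Properties;

public class AnalyzerConfigSketch {

    // Per the class comment above, all analyzer options share this prefix
    // (abbreviated c.b.s.C.analyzer in the javadoc).
    private static final String PREFIX =
            "com.bigdata.search.ConfigurableAnalyzerFactory.analyzer.";

    public static Properties example() {

        final Properties props = new Properties();

        // A catch-all for language range "*" is mandatory; send it to the StandardAnalyzer.
        props.setProperty(PREFIX + "*.analyzerClass",
                "org.apache.lucene.analysis.standard.StandardAnalyzer");

        // German literals; the two-letter and bibliographic tags alias the same pair via "like".
        props.setProperty(PREFIX + "deu.analyzerClass",
                "org.apache.lucene.analysis.de.GermanAnalyzer");
        props.setProperty(PREFIX + "de.like", "deu");
        props.setProperty(PREFIX + "ger.like", "deu");

        // Hypothetical private-use tag whose literals should not be indexed at all.
        props.setProperty(PREFIX + "x-internal.analyzerClass",
                "com.bigdata.search.EmptyAnalyzer");

        // Extended filtering picks the most specific matching range, so a literal tagged
        // "de-CH" is analyzed by the GermanAnalyzer pair rather than by the "*" catch-all.
        return props;
    }

    public static void main(final String[] args) {
        example().list(System.out);
    }
}

With that configuration, getAnalyzer("de-CH", true) returns the stop-word-filtering GermanAnalyzer while getAnalyzer("de-CH", false) returns the variant built with an empty stop set, and anything tagged with the hypothetical x-internal range is effectively excluded from the full-text index by the EmptyAnalyzer.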
From: <jer...@us...> - 2014-05-07 15:39:10
|
Revision: 8220 http://sourceforge.net/p/bigdata/code/8220 Author: jeremy_carroll Date: 2014-05-07 15:39:05 +0000 (Wed, 07 May 2014) Log Message: ----------- delete spurious character and ensure that the copyright symbol does not prevent the javadoc target from completing. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HALoadBalancerServlet.java branches/BIGDATA_RELEASE_1_3_0/build.xml Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HALoadBalancerServlet.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HALoadBalancerServlet.java 2014-05-07 15:31:17 UTC (rev 8219) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HALoadBalancerServlet.java 2014-05-07 15:39:05 UTC (rev 8220) @@ -79,7 +79,7 @@ * followers in a manner that reflects the CPU, IO Wait, and GC Time associated * with each service. * <p> - * The {@link PlatformStatsPlugIn}\xCA and {@link GangliaPlugIn} MUST be enabled + * The {@link PlatformStatsPlugIn} and {@link GangliaPlugIn} MUST be enabled * for the default load balancer policy to operate. It depends on those plugins * to maintain a model of the load on the HA replication cluster. The * GangliaPlugIn should be run only as a listener if you are are running the Modified: branches/BIGDATA_RELEASE_1_3_0/build.xml =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/build.xml 2014-05-07 15:31:17 UTC (rev 8219) +++ branches/BIGDATA_RELEASE_1_3_0/build.xml 2014-05-07 15:39:05 UTC (rev 8220) @@ -394,6 +394,7 @@ overview="${bigdata.dir}/overview.html" windowtitle="bigdata® v${build.ver}" classpathref="build.classpath" + encoding="utf-8" private="false" > <arg value="-J-Xmx1000m" /> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-07 15:31:20
|
Revision: 8219 http://sourceforge.net/p/bigdata/code/8219 Author: dmekonnen Date: 2014-05-07 15:31:17 +0000 (Wed, 07 May 2014) Log Message: ----------- attribute simplification and version bump Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/metadata.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb 2014-05-07 14:53:21 UTC (rev 8218) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb 2014-05-07 15:31:17 UTC (rev 8219) @@ -3,8 +3,8 @@ default['systap-bigdataHA'][:bigdata_group] = "bigdata" # Where to find and build bigdata code -# default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" -default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1" +default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" +# default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1" default['systap-bigdataHA'][:source] = "/home/ubuntu/bigdata-code" # Name of the federation of services (controls the Apache River GROUPS). Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/metadata.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/metadata.rb 2014-05-07 14:53:21 UTC (rev 8218) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/metadata.rb 2014-05-07 15:31:17 UTC (rev 8219) @@ -4,7 +4,7 @@ license 'All rights reserved' description 'Installs/Configures Systap Bigdata High Availability' long_description IO.read(File.join(File.dirname(__FILE__), 'README.txt')) -version '0.1.0' +version '0.1.1' depends 'apt' depends 'java', '>= 1.22.0' Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb 2014-05-07 14:53:21 UTC (rev 8218) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb 2014-05-07 15:31:17 UTC (rev 8219) @@ -21,7 +21,6 @@ action :create end -# directory node['systap-bigdataHA'][:fed_dir] do execute "change the ownership of the bigdata home directory to bigdata, which strangely is not" do user "root" group "root" @@ -58,16 +57,6 @@ end # -# Copy the /etc/init.d/bigdataHA template: -# -# template "/etc/init.d/bigdataHA" do -# source "init.d/bigdataHA.erb" -# user 'root' -# group 'root' -# mode 00755 -# end - -# # Create the log directory for bigdata: # directory node['systap-bigdataHA'][:log_dir] do @@ -90,20 +79,19 @@ # # Install the 
log4jHA.properties file: # -template "#{node['systap-bigdataHA'][:fed_dir]}/var/jetty/jetty.xml" do +template "#{node['systap-bigdataHA'][:jetty_dir]}/jetty.xml" do source "jetty.xml.erb" owner 'bigdata' group 'bigdata' mode 00644 end - # # Set the absolute path to the RWStore.properties file # execute "set absolute path to RWStore.properties" do - cwd "#{node['systap-bigdataHA'][:fed_dir]}/var/jetty/WEB-INF" - command "sed -i 's|<param-value>WEB-INF/RWStore.properties|<param-value>#{node['systap-bigdataHA'][:fed_dir]}/var/jetty/WEB-INF/RWStore.properties|' web.xml" + cwd "#{node['systap-bigdataHA'][:jetty_dir]}/WEB-INF" + command "sed -i 's|<param-value>WEB-INF/RWStore.properties|<param-value>#{node['systap-bigdataHA'][:jetty_dir]}/WEB-INF/RWStore.properties|' web.xml" end # This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-07 14:53:26
|
Revision: 8218 http://sourceforge.net/p/bigdata/code/8218 Author: dmekonnen Date: 2014-05-07 14:53:21 +0000 (Wed, 07 May 2014) Log Message: ----------- validated resync with reboot elimination Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/setHosts.py branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/default_test.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/jetty.xml.erb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb 2014-05-07 14:17:24 UTC (rev 8217) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb 2014-05-07 14:53:21 UTC (rev 8218) @@ -3,7 +3,8 @@ default['systap-bigdataHA'][:bigdata_group] = "bigdata" # Where to find and build bigdata code -default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" +# default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" +default['systap-bigdataHA'][:svn] = "https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1" default['systap-bigdataHA'][:source] = "/home/ubuntu/bigdata-code" # Name of the federation of services (controls the Apache River GROUPS). @@ -18,6 +19,9 @@ # Where the log files will live: default['systap-bigdataHA'][:log_dir] = node['systap-bigdataHA'][:fed_dir] + "/log" +# Where the jetty resourceBase is defined: +default['systap-bigdataHA'][:jetty_dir] = node['systap-bigdataHA'][:fed_dir] + "/var/jetty" + # Name of the replication cluster to which this HAJournalServer will belong. default['systap-bigdataHA'][:logical_service_id] = 'HA-Replication-Cluster-1' Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/setHosts.py =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/setHosts.py 2014-05-07 14:17:24 UTC (rev 8217) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/setHosts.py 2014-05-07 14:53:21 UTC (rev 8218) @@ -87,9 +87,10 @@ # startHAServices does not exit as expected, so remote restart commands will hang. # As a work around, we restart the host: # - # status, stdin, stderr = ssh_client.run( "sudo /etc/init.d/bigdataHA restart" ) + status, stdin, stderr = ssh_client.run( "sudo /etc/init.d/zookeeper-server restart" ) + status, stdin, stderr = ssh_client.run( "sudo /etc/init.d/bigdataHA restart" ) # status, stdin, stderr = ssh_client.run( "sudo service bigdataHA restart" ) - host.reboot() + # host.reboot() print "The hosts are now rebooting, this may take several minutes. 
\nOnce back up, you may confirm status by visiting:\n" for host in bigdataHosts: Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/default_test.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/default_test.rb 2014-05-07 14:17:24 UTC (rev 8217) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/default_test.rb 2014-05-07 14:53:21 UTC (rev 8218) @@ -1,6 +1,6 @@ require 'minitest/spec' describe_recipe 'systap-bigdata::test' do - it "is running the tomcat server" do - service('tomcat').must_be_running + it "is running the bigdataHA server" do + service('bigdataHA').must_be_running end end Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/jetty.xml.erb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/jetty.xml.erb 2014-05-07 14:17:24 UTC (rev 8217) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/jetty.xml.erb 2014-05-07 14:53:21 UTC (rev 8218) @@ -40,6 +40,7 @@ <!-- =========================================================== --> <!-- Initialize the Jetty MBean container --> <!-- =========================================================== --> + <!-- Note: This breaks CI if it is enabled <Call name="addBean"> <Arg> <New id="MBeanContainer" class="org.eclipse.jetty.jmx.MBeanContainer"> @@ -48,16 +49,16 @@ </Arg> </New> </Arg> - </Call> + </Call>--> - <!-- Add the static log to the MBean server. --> + <!-- Add the static log to the MBean server. <Call name="addBean"> <Arg> <New class="org.eclipse.jetty.util.log.Log" /> </Arg> - </Call> + </Call>--> - <!-- For remote MBean access (optional) --> + <!-- For remote MBean access (optional) <New id="ConnectorServer" class="org.eclipse.jetty.jmx.ConnectorServer"> <Arg> <New class="javax.management.remote.JMXServiceURL"> @@ -69,7 +70,7 @@ </Arg> <Arg>org.eclipse.jetty.jmx:name=rmiconnectorserver</Arg> <Call name="start" /> - </New> + </New>--> <!-- =========================================================== --> <!-- Http Configuration. --> @@ -121,19 +122,18 @@ <!-- =========================================================== --> <!-- Set handler Collection Structure --> <!-- =========================================================== --> + <!-- Recommended approach: does not work for HA CI test suite. <Set name="handler"> <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection"> <Set name="handlers"> <Array type="org.eclipse.jetty.server.Handler"> <Item> - <!-- This is the bigdata web application. --> <New id="WebAppContext" class="org.eclipse.jetty.webapp.WebAppContext"> <Set name="war"> - <!-- The location of the top-level of the bigdata webapp. 
--> - <Property name="jetty.resourceBase" default="<%= node['systap-bigdataHA'][:fed_dir] %>/var/jetty" /> + <SystemProperty name="jetty.resourceBase" default="bigdata-war/src" /> </Set> <Set name="contextPath">/bigdata</Set> - <Set name="descriptor"><%= node['systap-bigdataHA'][:fed_dir] %>/var/jetty/WEB-INF/web.xml</Set> + <Set name="descriptor">WEB-INF/web.xml</Set> <Set name="parentLoaderPriority">true</Set> <Set name="extractWAR">false</Set> </New> @@ -141,6 +141,47 @@ </Array> </Set> </New> + </Set> --> + <Set name="handler"> + <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection"> + <Set name="handlers"> + <Array type="org.eclipse.jetty.server.Handler"> + <Item> + <!-- This is the bigdata web application. --> + <New id="WebAppContext" class="org.eclipse.jetty.webapp.WebAppContext"> + <Set name="resourceBase"> + <!-- The location of the top-level of the bigdata webapp. --> + <Property name="jetty.resourceBase" default="<%= node['systap-bigdataHA'][:jetty_dir] %>" /> + </Set> + <Set name="contextPath">/bigdata</Set> + <Set name="descriptor">WEB-INF/web.xml</Set> + <Set name="descriptor"><%= node['systap-bigdataHA'][:jetty_dir] %>/WEB-INF/web.xml</Set> + <Set name="parentLoaderPriority">true</Set> + <Set name="extractWAR">false</Set> + </New> + </Item> + <Item> + <!-- This appears to be necessary in addition to the above. --> + <!-- Without this, it will not resolve http://localhost:8080/ --> + <!-- and can fail to deliver some of the static content. --> + <New id="ResourceHandler" class="org.eclipse.jetty.server.handler.ResourceHandler"> + <Set name="resourceBase"> + <!-- The location of the top-level of the bigdata webapp. --> + <Property name="jetty.resourceBase" default="<%= node['systap-bigdataHA'][:jetty_dir] %>" /> + </Set> + <Set name="welcomeFiles"> + <Array type="java.lang.String"> + <Item>html/index.html</Item> + </Array> + </Set> + </New> + </Item> + <!-- <Item> + <New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"></New> + </Item> --> + </Array> + </Set> + </New> </Set> <!-- =========================================================== --> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
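The jetty.xml.erb changes above comment out the JMX/MBean wiring (noted in the template as breaking CI) and replace the single WebAppContext handler with a HandlerCollection that adds a ResourceHandler rooted at the new jetty_dir attribute, so that static content such as http://localhost:8080/ still resolves. For illustration, the same handler wiring expressed with the embedded Jetty 9.1 API; the port and paths are placeholders and this sketch is not the deployed configuration:

import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.webapp.WebAppContext;

public class EmbeddedJettySketch {

    public static void main(final String[] args) throws Exception {

        // Placeholder for node['systap-bigdataHA'][:jetty_dir] in the template.
        final String jettyDir = "/path/to/fed_dir/var/jetty";

        final Server server = new Server(8080);

        // The bigdata webapp, rooted at the jetty resourceBase.
        final WebAppContext wac = new WebAppContext();
        wac.setResourceBase(jettyDir);
        wac.setContextPath("/bigdata");
        wac.setDescriptor(jettyDir + "/WEB-INF/web.xml");
        wac.setParentLoaderPriority(true);
        wac.setExtractWAR(false);

        // Serves static content (e.g. html/index.html) at the root context; the
        // template notes this is needed in addition to the WebAppContext.
        final ResourceHandler staticHandler = new ResourceHandler();
        staticHandler.setResourceBase(jettyDir);
        staticHandler.setWelcomeFiles(new String[] { "html/index.html" });

        final HandlerCollection handlers = new HandlerCollection();
        handlers.setHandlers(new Handler[] { wac, staticHandler });
        server.setHandler(handlers);

        server.start();
        server.join();
    }
}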
From: <dme...@us...> - 2014-05-07 14:17:25
Revision: 8217 http://sourceforge.net/p/bigdata/code/8217 Author: dmekonnen Date: 2014-05-07 14:17:24 +0000 (Wed, 07 May 2014) Log Message: ----------- missed during resync Added Paths: ----------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/bigdata-ganglia-1.0.2.jar Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/bigdata-ganglia-1.0.2.jar =================================================================== (Binary files differ) Index: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/bigdata-ganglia-1.0.2.jar =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/bigdata-ganglia-1.0.2.jar 2014-05-07 14:16:32 UTC (rev 8216) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/bigdata-ganglia-1.0.2.jar 2014-05-07 14:17:24 UTC (rev 8217) Property changes on: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/bigdata-ganglia-1.0.2.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-07 14:16:35
Revision: 8216 http://sourceforge.net/p/bigdata/code/8216 Author: thompsonbry Date: 2014-05-07 14:16:32 +0000 (Wed, 07 May 2014) Log Message: ----------- Identified a problem with the GangliaLBSPolicy where bigdata and bigdata-ganglia use the canonical (fully qualified) hostname and ganglia uses the local name of the host. This means that the host metrics are not being obtained by the GangliaLBSPolicy. While it is possible to override the hostname for ganglia starting with 3.2.x, this is quite a pain and could involve full restarts of gmond on all machines in the cluster. I have not yet resolved this issue, but I have added the ability to force the bigdata-ganglia implementation to use a hostname specified in an environment variable. Added the ability to override the hostname for bigdata-ganglia using the com.bigdata.hostname environment variable per [1]. Updated the pom.xml and build.properties files for the bigdata-ganglia-1.0.3 release. Published that release to our maven repo. [1] http://trac.bigdata.com/ticket/886 (Provide workaround for bad reverse DNS setups) Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/.classpath branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/BigdataStatics.java branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/build.properties branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/src/java/com/bigdata/ganglia/GangliaService.java branches/BIGDATA_RELEASE_1_3_0/build.properties branches/BIGDATA_RELEASE_1_3_0/pom.xml Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar Removed Paths: ------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.2.jar Modified: branches/BIGDATA_RELEASE_1_3_0/.classpath =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.classpath 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/.classpath 2014-05-07 14:16:32 UTC (rev 8216) @@ -86,10 +86,10 @@ <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-proxy-9.1.4.v20140401.jar" sourcepath="/Users/bryan/Downloads/org.eclipse.jetty.project-jetty-9.1.4.v20140401"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-rewrite-9.1.4.v20140401.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-security-9.1.4.v20140401.jar"/> - <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-server-9.1.4.v20140401.jar"/> + <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-server-9.1.4.v20140401.jar" sourcepath="/Users/bryan/Downloads/org.eclipse.jetty.project-jetty-9.1.4.v20140401"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-servlet-9.1.4.v20140401.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-util-9.1.4.v20140401.jar"/> - <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-webapp-9.1.4.v20140401.jar"/> + <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-webapp-9.1.4.v20140401.jar" sourcepath="/Users/bryan/Downloads/org.eclipse.jetty.project-jetty-9.1.4.v20140401"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/jetty/jetty-xml-9.1.4.v20140401.jar"/> <classpathentry exported="true" kind="lib" path="bigdata-sails/lib/jackson-core-2.2.3.jar"/> <classpathentry kind="output" path="bin"/> Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.2.jar =================================================================== (Binary 
files differ) Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar =================================================================== (Binary files differ) Index: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar 2014-05-07 14:16:32 UTC (rev 8216) Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/BigdataStatics.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/BigdataStatics.java 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/BigdataStatics.java 2014-05-07 14:16:32 UTC (rev 8216) @@ -27,9 +27,6 @@ package com.bigdata; -import com.bigdata.counters.AbstractStatisticsCollector; -import com.bigdata.jini.start.process.ProcessHelper; - /** * A class for those few statics that it makes sense to reference from other * places. @@ -49,29 +46,31 @@ /** * The name of an environment variable whose value will be used as the * canoncial host name for the host running this JVM. This information is - * used by the {@link AbstractStatisticsCollector}, which is responsible for - * obtaining and reporting the canonical hostname for the {@link Banner} and - * other purposes. + * used by the {@link com.bigdata.counters.AbstractStatisticsCollector}, + * which is responsible for obtaining and reporting the canonical hostname + * for the {@link Banner} and other purposes. * - * @see AbstractStatisticsCollector - * @see Banner + * @see com.bigdata.counters.AbstractStatisticsCollector + * @see com.bigdata.Banner + * @see com.bigdata.ganglia.GangliaService#HOSTNAME * @see <a href="http://trac.bigdata.com/ticket/886" >Provide workaround for * bad reverse DNS setups</a> */ public static final String HOSTNAME = "com.bigdata.hostname"; - + /** * The #of lines of output from a child process which will be echoed onto * {@link System#out} when that child process is executed. This makes it * easy to track down why a child process dies during service start. If you * want to see all output from the child process, then you should set the - * log level for the {@link ProcessHelper} class to INFO. + * log level for the {@link com.bigdata.jini.start.process.ProcessHelper} + * class to INFO. * <p> - * Note: This needs to be more than the length of the {@link Banner} output - * in order for anything related to the process behavior to be echoed on - * {@link System#out}. + * Note: This needs to be more than the length of the + * {@link com.bigdata.Banner} output in order for anything related to the + * process behavior to be echoed on {@link System#out}. 
* - * @see ProcessHelper + * @see com.bigdata.jini.start.process.ProcessHelper */ public static int echoProcessStartupLineCount = 30;//Integer.MAX_VALUE;//100 Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/build.properties =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/build.properties 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/build.properties 2014-05-07 14:16:32 UTC (rev 8216) @@ -38,7 +38,7 @@ release.dir=ant-release # The build version. -build.ver=1.0.2 +build.ver=1.0.3 # Set true to do a snapshot build. This changes the value of ${version} to # include the date. Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/src/java/com/bigdata/ganglia/GangliaService.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/src/java/com/bigdata/ganglia/GangliaService.java 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/src/java/com/bigdata/ganglia/GangliaService.java 2014-05-07 14:16:32 UTC (rev 8216) @@ -1452,27 +1452,50 @@ } - /** - * The name for this host. - */ - public static final String getCanonicalHostName() { - String s; - try { - /* - * Note: This should be the host *name* NOT an IP address of a - * preferred Ethernet adaptor. - */ - s = InetAddress.getLocalHost().getCanonicalHostName(); - } catch (Throwable t) { - log.warn("Could not resolve canonical name for host: " + t); - } - try { - s = InetAddress.getLocalHost().getHostName(); - } catch (Throwable t) { - log.warn("Could not resolve name for host: " + t); - s = "localhost"; - } - return s; + /** + * The name of an environment variable whose value will be used as the + * canoncial host name for the host running this JVM. This information is + * used by the {@link GangliaService}, which is responsible for obtaining + * and reporting the canonical hostname for host metrics reporting. + * + * @see <a href="http://trac.bigdata.com/ticket/886" >Provide workaround for + * bad reverse DNS setups</a> + */ + public static final String HOSTNAME = "com.bigdata.hostname"; + + /** + * The name for this host. + * + * @see #HOSTNAME + * @see <a href="http://trac.bigdata.com/ticket/886" >Provide workaround for + * bad reverse DNS setups</a> + */ + public static final String getCanonicalHostName() { + String s = System.getProperty(HOSTNAME); + if (s != null) { + // Trim whitespace. + s = s.trim(); + } + if (s != null && s.length() != 0) { + log.warn("Hostname override: hostname=" + s); + } else { + try { + /* + * Note: This should be the host *name* NOT an IP address of a + * preferred Ethernet adaptor. 
+ */ + s = InetAddress.getLocalHost().getCanonicalHostName(); + } catch (Throwable t) { + log.warn("Could not resolve canonical name for host: " + t); + } + try { + s = InetAddress.getLocalHost().getHostName(); + } catch (Throwable t) { + log.warn("Could not resolve name for host: " + t); + s = "localhost"; + } + } + return s; } /** Modified: branches/BIGDATA_RELEASE_1_3_0/build.properties =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/build.properties 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/build.properties 2014-05-07 14:16:32 UTC (rev 8216) @@ -69,7 +69,7 @@ fastutil.version=5.1.5 dsiutils.version=1.0.6-020610 lgplutils.version=1.0.7-270114 -ganglia-version=1.0.2 +ganglia-version=1.0.3 gas-version=0.1.0 jackson-version=2.2.3 Modified: branches/BIGDATA_RELEASE_1_3_0/pom.xml =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/pom.xml 2014-05-07 13:57:17 UTC (rev 8215) +++ branches/BIGDATA_RELEASE_1_3_0/pom.xml 2014-05-07 14:16:32 UTC (rev 8216) @@ -97,7 +97,7 @@ <fastutil.version>5.1.5</fastutil.version> <dsiutils.version>1.0.6-020610</dsiutils.version> <lgplutils.version>1.0.7-270114</lgplutils.version> - <bigdata.ganglia.version>1.0.2</bigdata.ganglia.version> + <bigdata.ganglia.version>1.0.3</bigdata.ganglia.version> <jackson.version>2.2.3</jackson.version> </properties> <!-- TODO Can we declare the versions of the dependencies here as @@ -364,15 +364,15 @@ mvn deploy:deploy-file \ -DgroupId=com.bigdata \ -DartifactId=bigdata-ganglia \ - -Dversion=1.0.2 \ + -Dversion=1.0.3 \ -Dpackaging=jar \ -DrepositoryId=bigdata.releases \ -Durl=scpexe://www.systap.com/srv/www/htdocs/systap.com/maven/releases/ \ - -Dfile=bigdata/lib/bigdata-ganglia-1.0.2.jar + -Dfile=bigdata/lib/bigdata-ganglia-1.0.3.jar --> <groupId>com.bigdata</groupId> <artifactId>bigdata-ganglia</artifactId> - <version>1.0.1</version> + <version>${bigdata.ganglia.version}</version> <optional>true</optional> </dependency> <!-- This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
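Two points are worth noting about the change above. First, although the log message calls com.bigdata.hostname an environment variable, the patch reads it with System.getProperty, so in practice it is supplied as a JVM system property, for example -Dcom.bigdata.hostname=host01.example.com on the java command line or in the service start script. Second, the precedence is: explicit override first, then DNS-derived names, then "localhost". The following condensed Java sketch illustrates that precedence; it is a simplified illustration, not a verbatim copy of the committed GangliaService.getCanonicalHostName() method:

import java.net.InetAddress;

/**
 * Condensed sketch of the hostname resolution order introduced in r8216.
 * The demo class itself is not part of the code base.
 */
public class HostnameOverrideDemo {

    public static String resolveHostName() {
        String s = System.getProperty("com.bigdata.hostname");
        if (s != null)
            s = s.trim();
        if (s != null && s.length() != 0)
            return s; // explicit override, e.g. -Dcom.bigdata.hostname=host01
        try {
            // Host *name*, not the IP address of a preferred adaptor.
            return InetAddress.getLocalHost().getCanonicalHostName();
        } catch (Throwable t) {
            // Bad reverse DNS setup: fall through to the plain host name.
        }
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (Throwable t) {
            return "localhost";
        }
    }

    public static void main(final String[] args) {
        System.out.println("hostname=" + resolveHostName());
    }
}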
From: <dme...@us...> - 2014-05-07 13:57:23
Revision: 8215 http://sourceforge.net/p/bigdata/code/8215 Author: dmekonnen Date: 2014-05-07 13:57:17 +0000 (Wed, 07 May 2014) Log Message: ----------- resync with BIGDATA_RELEASE_1_3_0 branch Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/.classpath branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/BOp.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/CoreBaseBOp.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/IValueExpression.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/controller/ServiceCallJoin.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-ganglia/build.properties branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-A.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-B.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-C.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3LoadBalancerTestCase.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-A.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-B.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-C.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-D.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-E.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournalTest.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_GangliaLBS.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_NOP.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_RoundRobin.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/zkClient.config branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FilterNode.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FunctionNode.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IQueryNode.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IValueExpressionNode.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/QueryBase.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StatementPatternNode.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/SubqueryFunctionNodeBase.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/TermNode.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ValueExpressionNode.java 
branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTUnionFiltersOptimizer.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/AbstractJoinGroupOptimizer.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestAST.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/AbstractOptimizerTestCase.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/TestALPPinTrac773.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/TestASTUnionFiltersOptimizer.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/TestAll.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataServlet.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HALoadBalancerServlet.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HAStatusServletUtil.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/NanoSparqlServer.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractLBSPolicy.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/ServiceScore.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/NOPLBSPolicy.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/RoundRobinLBSPolicy.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/GangliaLBSPolicy.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/HostTable.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-war/src/WEB-INF/web.xml branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-war/src/html/css/style.css branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-war/src/html/index.html branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-war/src/html/js/workbench.js branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-war/src/jetty.xml branches/DEPLOYMENT_BRANCH_1_3_1/build.properties branches/DEPLOYMENT_BRANCH_1_3_1/build.xml branches/DEPLOYMENT_BRANCH_1_3_1/pom.xml branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/HAJournal/HAJournal.config branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/bin/startHAServices Added Paths: ----------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/TestASTExistsAndJoinOrderByTypeOptimizers.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/TestASTPropertyPathOptimizer.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/DefaultHARequestURIRewriter.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/IHALoadBalancerPolicy.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/IHAPolicyLifeCycle.java branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/IHARequestURIRewriter.java Modified: branches/DEPLOYMENT_BRANCH_1_3_1/.classpath 
=================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/.classpath 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/.classpath 2014-05-07 13:57:17 UTC (rev 8215) @@ -58,6 +58,8 @@ <classpathentry exported="true" kind="lib" path="bigdata/lib/unimi/fastutil-5.1.5.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/lucene/lucene-analyzers-3.0.0.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/lucene/lucene-core-3.0.0.jar"/> + <classpathentry kind="lib" path="bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar"/> + <classpathentry kind="lib" path="bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar"/> <classpathentry exported="true" kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/high-scale-lib-v1.1.2.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/junit-ext-1.1-b3-dev.jar"/> Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar =================================================================== (Binary files differ) Index: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar 2014-05-07 13:57:17 UTC (rev 8215) Property changes on: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jmx-9.1.4.v20140401.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar =================================================================== (Binary files differ) Index: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar 2014-05-07 13:57:17 UTC (rev 8215) Property changes on: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/lib/jetty/jetty-jndi-9.1.4.v20140401.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/BOp.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/BOp.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/BOp.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -211,6 +211,24 @@ */ boolean isController(); + /** + * The contract of this method at this level is under-specified. + * Sub-classes may choose between: + * + * - return a string representation of the object, similar to the use of {@link #toString()} + * + * Or: + * + * - return a pretty-print representation of the object with indent + * + * Note that the former contract may or may not include recursive descent through a tree-like + * object, whereas the latter almost certainly does. 
+ * + * @param indent + * @return + */ + String toString(final int indent); + /** * Interface declaring well known annotations. * <p> Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/CoreBaseBOp.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/CoreBaseBOp.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/CoreBaseBOp.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -156,9 +156,44 @@ return sb.toString(); } + + /** + * Append a name to a string buffer, possibly shortening the name. + * The current algorithm for name shortening is to take the end of the name + * after the pen-ultimate '.'. + * @param sb + * @param longishName + */ + protected void shortenName(final StringBuilder sb, final String longishName) { + int lastDot = longishName.lastIndexOf('.'); + if (lastDot != -1) { + int lastButOneDot = longishName.lastIndexOf('.', lastDot - 1); + sb.append(longishName.substring(lastButOneDot + 1)); + return; + } + sb.append(longishName); + } + /** + * Add a string representation of annotations into a string builder. + * By default this is a non-recursive operation, however + * subclasses may override {@link #annotationValueToString(StringBuilder, BOp, int)} + * in order to make this recursive. + * @param sb + */ protected void annotationsToString(final StringBuilder sb) { - final Map<String,Object> annotations = annotations(); + annotationsToString(sb, 0); + } + + /** + * Add a string representation of annotations into a string builder. + * By default this is a non-recursive operation, however + * subclasses may override {@link #annotationValueToString(StringBuilder, BOp, int)} + * in order to make this recursive. + * @param sb + */ + protected void annotationsToString(final StringBuilder sb, final int indent) { + final Map<String,Object> annotations = annotations(); if (!annotations.isEmpty()) { sb.append("["); boolean first = true; @@ -169,20 +204,35 @@ sb.append(", "); final String key = e.getKey(); final Object val = e.getValue(); + shortenName(sb, key); + sb.append("="); if (val != null && val.getClass().isArray()) { - sb.append(key + "=" + Arrays.toString((Object[]) val)); + sb.append(Arrays.toString((Object[]) val)); } else if (key.equals(IPredicate.Annotations.FLAGS)) { - sb.append(key + "=" + Tuple.flagString((Integer) val)); + sb.append(Tuple.flagString((Integer) val)); } else if( val instanceof BOp) { - sb.append(key + "=" + ((BOp) val).toShortString()); + annotationValueToString(sb, (BOp)val, indent); } else { - sb.append(key + "=" + val); + sb.append(val); } first = false; } sb.append("]"); } - } + } + + /** + * Add a string representation of a BOp annotation value into a string builder. + * By default this is a non-recursive operation, however + * subclasses may override and give a recursive definition, which should respect + * the given indent. + * @param sb The destination buffer + * @param val The BOp to serialize + * @param indent An indent to use if a recursive approach is chosen. 
+ */ + protected void annotationValueToString(final StringBuilder sb, final BOp val, final int indent) { + sb.append(val.toString()); + } @Override final public Object getRequiredProperty(final String name) { @@ -363,7 +413,7 @@ if(v1 == v2) continue; - if (v1 != null && v2 == null) + if (v1 == null || v2 == null) return false; if (v1.getClass().isArray()) { @@ -441,6 +491,26 @@ } + /** + * The contract of this method at this level is under-specified. + * Sub-classes may choose between: + * + * - return a string representation of the object, similar to the use of {@link #toString()} + * + * Or: + * + * - return a pretty-print representation of the object with indent + * + * Note that the former contract may or may not include recursive descent through a tree-like + * object, whereas the latter almost certainly does. + * + * @param indent + * @return + */ + public String toString(int indent) { + return toString(); + } + private static final transient String ws = " "; } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/IValueExpression.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/IValueExpression.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/IValueExpression.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -26,5 +26,11 @@ * <code>null</code>. */ E get(IBindingSet bindingSet); + /** + * A string representation of a recursive structure with pretty-print indent. + * @param indent + * @return + */ + String toString(int indent); } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/controller/ServiceCallJoin.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/controller/ServiceCallJoin.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/java/com/bigdata/bop/controller/ServiceCallJoin.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -58,7 +58,6 @@ import com.bigdata.rdf.model.BigdataURI; import com.bigdata.rdf.sparql.ast.service.BigdataServiceCall; import com.bigdata.rdf.sparql.ast.service.ExternalServiceCall; -import com.bigdata.rdf.sparql.ast.service.IDoNotJoinService; import com.bigdata.rdf.sparql.ast.service.RemoteServiceCall; import com.bigdata.rdf.sparql.ast.service.ServiceCall; import com.bigdata.rdf.sparql.ast.service.ServiceCallUtility; @@ -586,52 +585,6 @@ : new UnsyncLocalOutputBuffer<IBindingSet>( op.getChunkCapacity(), sink2); - if (serviceCall instanceof IDoNotJoinService) { - - // The iterator draining the subquery - ICloseableIterator<IBindingSet[]> serviceSolutionItr = null; - try { - - /* - * Invoke the service. - * - * Note: Returns [null] IFF SILENT and SERVICE ERROR. - */ - - serviceSolutionItr = doServiceCall(serviceCall, chunk); - - if (serviceSolutionItr != null) { - - while (serviceSolutionItr.hasNext()) { - - final IBindingSet[] bsets = - serviceSolutionItr.next(); - - for (IBindingSet bs : bsets) { - - unsyncBuffer.add(bs); - - } - - } - - } - - } finally { - - // ensure the service call iterator is closed. - if (serviceSolutionItr != null) - serviceSolutionItr.close(); - - } - - unsyncBuffer.flush(); - - // done. - return null; - - } - final JVMHashJoinUtility state = new JVMHashJoinUtility(op, silent ? 
JoinTypeEnum.Optional : JoinTypeEnum.Normal ); Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb 2014-05-07 13:57:17 UTC (rev 8215) @@ -33,7 +33,7 @@ user 'ubuntu' group 'ubuntu' cwd "/home/ubuntu" - command "svn checkout https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0 #{node['systap-bigdataHA'][:source]}" + command "svn checkout #{node['systap-bigdataHA'][:svn]} #{node['systap-bigdataHA'][:source]}" end execute "ant deploy-artifact" do @@ -90,7 +90,7 @@ # # Install the log4jHA.properties file: # -template "#{node['systap-bigdataHA'][:fed_dir]}/var/jetty/WEB-INF/jetty.xml" do +template "#{node['systap-bigdataHA'][:fed_dir]}/var/jetty/jetty.xml" do source "jetty.xml.erb" owner 'bigdata' group 'bigdata' Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-ganglia/build.properties =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-ganglia/build.properties 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-ganglia/build.properties 2014-05-07 13:57:17 UTC (rev 8215) @@ -38,7 +38,7 @@ release.dir=ant-release # The build version. -build.ver=1.0.1 +build.ver=1.0.2 # Set true to do a snapshot build. This changes the value of ${version} to # include the date. Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-A.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-A.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-A.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,6 +60,9 @@ private static fedname = "benchmark"; + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = 9080; + // write replication pipeline port (listener). private static haPort = 9090; @@ -250,6 +253,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + } /* Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-B.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-B.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-B.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,9 +60,9 @@ private static fedname = "benchmark"; - // NanoSparqlServer (http) port. - private static nssPort = ConfigMath.add(8090,1); - + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = ConfigMath.add(9080,1); + // write replication pipeline port (listener). 
private static haPort = ConfigMath.add(9090,1); @@ -252,6 +252,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + } /* Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-C.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-C.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournal-C.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,6 +60,9 @@ private static fedname = "benchmark"; + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = ConfigMath.add(9080,1); + // write replication pipeline port (listener). private static haPort = ConfigMath.add(9090,2); @@ -249,6 +252,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + } /* Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -104,8 +104,8 @@ import com.bigdata.rdf.sail.CreateKBTask; import com.bigdata.rdf.sail.webapp.ConfigParams; import com.bigdata.rdf.sail.webapp.HALoadBalancerServlet; -import com.bigdata.rdf.sail.webapp.IHALoadBalancerPolicy; import com.bigdata.rdf.sail.webapp.NanoSparqlServer; +import com.bigdata.rdf.sail.webapp.lbs.IHALoadBalancerPolicy; import com.bigdata.rwstore.RWStore; import com.bigdata.service.AbstractHATransactionService; import com.bigdata.service.jini.FakeLifeCycle; @@ -756,7 +756,20 @@ final Quorum<HAGlue, QuorumService<HAGlue>> quorum = (Quorum) new ZKQuorumImpl<HAGlue, HAQuorumService<HAGlue, HAJournal>>( replicationFactor); - // The HAJournal. + /** + * The HAJournal. + * + * FIXME This step can block for a long time if we have a lot of + * HALogs to scan. While it blocks, the REST API (including the LBS) + * is down. This means that client requests to the service end point + * can not be proxied to a service that is online. The problem is + * the interaction with the BigdataRDFServletContextListener which + * needs to (a) set the IIndexManager on the ServletContext; and (b) + * initiate the default KB create (if it is the quorum leader). + * + * @see <a href="http://trac.bigdata.com/ticket/775" > HAJournal + * start() (optimization) </a> + */ this.journal = newHAJournal(this, config, quorum); } @@ -4536,6 +4549,13 @@ // Start the server. jettyServer.start(); + if (Boolean.getBoolean("jetty.dump.start")) { + + // Support the jetty dump-after-start semantics. + log.warn(jettyServer.dump()); + + } + /* * Report *an* effective URL of this service. 
* @@ -4636,7 +4656,7 @@ if (log.isInfoEnabled()) log.info("Will set LBS: wac=" + wac + ", policy: " + policy); - HALoadBalancerServlet.setPolicy(wac.getServletContext(), policy); + HALoadBalancerServlet.setLBSPolicy(wac.getServletContext(), policy); } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -2137,6 +2137,13 @@ private final String serviceName; private final UUID serviceId; private final int jettyPort; + /** + * The value of this environment variable is passed down. You can set + * this environment variable to force jetty to dump its internal start + * after start. + */ + private final boolean jettyDumpStart = Boolean + .getBoolean("jetty.dump.start"); // private final File serviceDir; private final String[] args; @@ -2230,6 +2237,12 @@ private final String JETTY_RESOURCE_BASE = "jetty.resourceBase"; /** + * Used to override the <code>jetty.dump.start</code> environment + * property. + */ + private final String TEST_JETTY_DUMP_START = "jetty.dump.start"; + + /** * The absolute effective path of the service directory. This is * overridden on the {@link #TEST_SERVICE_DIR} environment variable * and in the deployed HAJournal.config file in order to have the @@ -2277,6 +2290,9 @@ // Override the location of the webapp as deployed. cmds.add("-D" + JETTY_RESOURCE_BASE + "=\".\""); + // Override the jetty.dump.start. + cmds.add("-D" + TEST_JETTY_DUMP_START + "=" + jettyDumpStart); + super.addCommandArgs(cmds); for (String arg : args) { Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3LoadBalancerTestCase.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3LoadBalancerTestCase.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3LoadBalancerTestCase.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -37,9 +37,9 @@ import com.bigdata.journal.jini.ha.HAJournalServer.HAQuorumService; import com.bigdata.journal.jini.ha.HAJournalTest.HAGlueTest; import com.bigdata.rdf.sail.webapp.HALoadBalancerServlet; -import com.bigdata.rdf.sail.webapp.IHALoadBalancerPolicy; import com.bigdata.rdf.sail.webapp.client.RemoteRepository; import com.bigdata.rdf.sail.webapp.client.RemoteRepository.RemoveOp; +import com.bigdata.rdf.sail.webapp.lbs.IHALoadBalancerPolicy; /** * Test suite for the HA load balancer. Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-A.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-A.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-A.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,6 +60,9 @@ private static fedname = "benchmark"; + // The RMI port for the HAGlue interface (may be ZERO for a random port). 
+ private static rmiPort = 9080; + // write replication pipeline port (listener). private static haPort = 9090; @@ -257,6 +260,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + // Use the overridden version of the HAJournal by default so we get the // HAGlueTest API for every test. HAJournalClass = "com.bigdata.journal.jini.ha.HAJournalTest"; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-B.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-B.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-B.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,6 +60,9 @@ private static fedname = "benchmark"; + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = ConfigMath.add(9080,1); + // write replication pipeline port (listener). private static haPort = ConfigMath.add(9090,1); @@ -256,6 +259,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + // Use the overridden version of the HAJournal by default so we get the // HAGlueTest API for every test. HAJournalClass = "com.bigdata.journal.jini.ha.HAJournalTest"; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-C.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-C.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-C.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,6 +60,9 @@ private static fedname = "benchmark"; + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = ConfigMath.add(9080,2); + // write replication pipeline port (listener). private static haPort = ConfigMath.add(9090,2); @@ -256,6 +259,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + // Use the overridden version of the HAJournal by default so we get the // HAGlueTest API for every test. HAJournalClass = "com.bigdata.journal.jini.ha.HAJournalTest"; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-D.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-D.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-D.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,9 +60,9 @@ private static fedname = "benchmark"; - // NanoSparqlServer (http) port. - private static nssPort = ConfigMath.add(8090,3); - + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = ConfigMath.add(9080,3); + // write replication pipeline port (listener). 
private static haPort = ConfigMath.add(9090,3); @@ -259,6 +259,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + // Use the overridden version of the HAJournal by default so we get the // HAGlueTest API for every test. HAJournalClass = "com.bigdata.journal.jini.ha.HAJournalTest"; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-E.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-E.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournal-E.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -60,9 +60,9 @@ private static fedname = "benchmark"; - // NanoSparqlServer (http) port. - private static nssPort = ConfigMath.add(8090,4); - + // The RMI port for the HAGlue interface (may be ZERO for a random port). + private static rmiPort = ConfigMath.add(9080,4); + // write replication pipeline port (listener). private static haPort = ConfigMath.add(9090,4); @@ -259,6 +259,9 @@ replicationFactor = bigdata.replicationFactor; + exporter = new BasicJeriExporter(TcpServerEndpoint.getInstance(bigdata.rmiPort), + new BasicILFactory()); + // Use the overridden version of the HAJournal by default so we get the // HAGlueTest API for every test. HAJournalClass = "com.bigdata.journal.jini.ha.HAJournalTest"; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournalTest.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournalTest.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/HAJournalTest.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -102,7 +102,7 @@ import com.bigdata.rdf.sail.BigdataSailRepository; import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; import com.bigdata.rdf.sail.webapp.HALoadBalancerServlet; -import com.bigdata.rdf.sail.webapp.IHALoadBalancerPolicy; +import com.bigdata.rdf.sail.webapp.lbs.IHALoadBalancerPolicy; import com.bigdata.rdf.store.AbstractTripleStore; import com.bigdata.service.jini.RemoteDestroyAdmin; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_GangliaLBS.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_GangliaLBS.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_GangliaLBS.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -23,7 +23,7 @@ */ package com.bigdata.journal.jini.ha; -import com.bigdata.rdf.sail.webapp.IHALoadBalancerPolicy; +import com.bigdata.rdf.sail.webapp.lbs.IHALoadBalancerPolicy; import com.bigdata.rdf.sail.webapp.lbs.policy.ganglia.GangliaLBSPolicy; /** Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_NOP.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_NOP.java 2014-05-07 13:29:39 UTC (rev 8214) 
+++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_NOP.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -23,7 +23,7 @@ */ package com.bigdata.journal.jini.ha; -import com.bigdata.rdf.sail.webapp.IHALoadBalancerPolicy; +import com.bigdata.rdf.sail.webapp.lbs.IHALoadBalancerPolicy; import com.bigdata.rdf.sail.webapp.lbs.policy.NOPLBSPolicy; /** Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_RoundRobin.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_RoundRobin.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_RoundRobin.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -23,7 +23,7 @@ */ package com.bigdata.journal.jini.ha; -import com.bigdata.rdf.sail.webapp.IHALoadBalancerPolicy; +import com.bigdata.rdf.sail.webapp.lbs.IHALoadBalancerPolicy; import com.bigdata.rdf.sail.webapp.lbs.policy.RoundRobinLBSPolicy; Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties 2014-05-07 13:57:17 UTC (rev 8215) @@ -16,10 +16,21 @@ #log4j.logger.com.bigdata.zookeeper.ZooHelper=ALL log4j.logger.com.bigdata.rdf.internal.LexiconConfiguration=FATAL +# webapp logging. +log4j.logger.com.bigdata.rdf.sail.webapp=ALL +#log4j.logger.com.bigdata.rdf.sail.webapp.RESTServlet=INFO +#log4j.logger.com.bigdata.rdf.sail.webapp.HALoadBalancerServlet=ALL +#log4j.logger.com.bigdata.ganglia.GangliaService=INFO + +# jetty debug logging. +log4j.logger.org.eclipse.jetty=INFO +#log4j.logger.org.eclipse.jetty.client=DEBUG +#log4j.logger.org.eclipse.jetty.proxy=DEBUG + log4j.appender.haLog=org.apache.log4j.FileAppender log4j.appender.haLog.Threshold=ALL # Note: path is relative to the directory in which the service starts. -log4j.appender.haLog.File=halog-B.txt +log4j.appender.haLog.File=halog-A.txt log4j.appender.haLog.Append=true log4j.appender.haLog.layout=org.apache.log4j.PatternLayout log4j.appender.haLog.layout.ConversionPattern=%-5p: %d{HH:mm:ss,SSS} %r %X{hostname} %X{serviceUUID} %X{taskname} %X{timestamp} %X{resources} %t %l: %m%n \ No newline at end of file Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/zkClient.config =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/zkClient.config 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-jini/src/test/com/bigdata/journal/jini/ha/zkClient.config 2014-05-07 13:57:17 UTC (rev 8215) @@ -1,22 +1,6 @@ -/* Zookeeper client only configuration. +/* + * Zookeeper client configuration. 
*/ -import java.io.File; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.util.UUID; - -import com.bigdata.util.NV; -import com.bigdata.util.config.NicUtil; -import com.bigdata.journal.Options; -import com.bigdata.journal.BufferMode; -import com.bigdata.journal.jini.ha.HAJournal; -import com.bigdata.jini.lookup.entry.*; -import com.bigdata.service.IBigdataClient; -import com.bigdata.service.AbstractTransactionService; -import com.bigdata.service.jini.*; -import com.bigdata.service.jini.lookup.DataServiceFilter; -import com.bigdata.service.jini.master.ServicesTemplate; -import com.bigdata.jini.start.config.*; import com.bigdata.jini.util.ConfigMath; import org.apache.zookeeper.ZooDefs; @@ -30,16 +14,6 @@ private static fedname = "benchmark"; - /* The logical service identifier shared by all members of the quorum. - * - * Note: The test fixture ignores this value. For the avoidance of - * doubt, the value is commented out. - */ - //private static logicalServiceId = "CI-HAJournal-1"; - - // zookeeper - static private sessionTimeout = (int)ConfigMath.s2ms(20); - } /* @@ -53,36 +27,16 @@ /* A comma separated list of host:port pairs, where the port is * the CLIENT port for the zookeeper server instance. */ - // standalone. servers = "localhost:2081"; - // ensemble -// servers = bigdata.zoo1+":2181" -// + ","+bigdata.zoo2+":2181" -// + ","+bigdata.zoo3+":2181" -// ; /* Session timeout (optional). */ - sessionTimeout = bigdata.sessionTimeout; + sessionTimeout = (int)ConfigMath.s2ms(20); - /* - * ACL for the zookeeper nodes created by the bigdata federation. - * - * Note: zookeeper ACLs are not transmitted over secure channels - * and are placed into plain text Configuration files by the - * ServicesManagerServer. - */ + // Zookeeper ACLs. acl = new ACL[] { new ACL(ZooDefs.Perms.ALL, new Id("world", "anyone")) }; - /* - * Note: Normally on the HAJournalServer component. Hacked in the test - * suite setup to look at the ZooKeeper component instead. 
- */ - - logicalServiceId = bigdata.logicalServiceId; - - replicationFactor = bigdata.replicationFactor; } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FilterNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FilterNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FilterNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -87,7 +87,7 @@ sb.append("\n"); sb.append(indent(indent)); - sb.append("FILTER( ").append(getValueExpressionNode()).append(" )"); + sb.append("FILTER( ").append(getValueExpressionNode().toString(indent+1)).append(" )"); // if (getQueryHints() != null) { // sb.append("\n"); Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FunctionNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FunctionNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/FunctionNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -2,6 +2,7 @@ import java.io.Serializable; import java.util.Collections; +import java.util.Iterator; import java.util.Map; import org.openrdf.model.URI; @@ -9,6 +10,7 @@ import com.bigdata.bop.BOp; import com.bigdata.bop.IValueExpression; import com.bigdata.bop.NV; +import com.bigdata.bop.BOp.Annotations; /** * AST node for anything which is neither a constant nor a variable, including @@ -171,7 +173,7 @@ */ /** - * Return <code>t1 AND t2</code> (aka EQ). + * Return <code>t1 AND t2</code>. */ static public FunctionNode AND(final ValueExpressionNode t1, final ValueExpressionNode t2) { @@ -182,7 +184,7 @@ } /** - * Return <code>t1 OR t2</code> (aka EQ). + * Return <code>t1 OR t2</code>. */ static public FunctionNode OR(final ValueExpressionNode t1, final ValueExpressionNode t2) { @@ -295,4 +297,35 @@ } + + /** + * Provides a pretty print representation with recursive descent. + */ + @Override + public String toString(int i) { + + final StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + final Integer bopId = (Integer) getProperty(Annotations.BOP_ID); + if (bopId != null) { + sb.append("[" + bopId + "]"); + } + sb.append("("); + int nwritten = 0; + final Iterator<BOp> itr = argIterator(); + while(itr.hasNext()) { + final BOp t = itr.next(); + if (nwritten > 0) + sb.append(','); + if (t == null) { + sb.append("<null>"); + } else { + sb.append(((IValueExpressionNode)t).toString(i+1)); + } + nwritten++; + } + sb.append(")"); + annotationsToString(sb, i); + return sb.toString(); + } } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IQueryNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IQueryNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IQueryNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -37,8 +37,11 @@ } + /** - * Pretty print with an indent. + * A string representation of a recursive structure with pretty-print indent. 
+ * @param indent + * @return */ String toString(final int indent); Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IValueExpressionNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IValueExpressionNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/IValueExpressionNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -34,4 +34,11 @@ */ void invalidate(); + /** + * A string representation of a recursive structure with pretty-print indent. + * @param indent + * @return + */ + String toString(final int indent); + } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/QueryBase.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/QueryBase.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/QueryBase.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -178,8 +178,13 @@ public void setConstruct(final ConstructNode construct) { setProperty(Annotations.CONSTRUCT, construct); - setQueryType(QueryType.CONSTRUCT); + if (construct != null) { + + setQueryType(QueryType.CONSTRUCT); + + } + } public void setProjection(final ProjectionNode projection) { Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StatementPatternNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StatementPatternNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/StatementPatternNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -574,7 +574,7 @@ if (getQueryHints() != null && !getQueryHints().isEmpty()) { sb.append("\n"); sb.append(indent(indent + 1)); - sb.append(Annotations.QUERY_HINTS); + shortenName(sb, Annotations.QUERY_HINTS); sb.append("="); sb.append(getQueryHints().toString()); } @@ -586,7 +586,7 @@ if (rangeCount != null) { sb.append("\n"); sb.append(indent(indent + 1)); - sb.append(AST2BOpBase.Annotations.ESTIMATED_CARDINALITY); + shortenName(sb, AST2BOpBase.Annotations.ESTIMATED_CARDINALITY); sb.append("="); sb.append(rangeCount.toString()); } @@ -594,7 +594,7 @@ if (keyOrder != null) { sb.append("\n"); sb.append(indent(indent + 1)); - sb.append(AST2BOpBase.Annotations.ORIGINAL_INDEX); + shortenName(sb, AST2BOpBase.Annotations.ORIGINAL_INDEX); sb.append("="); sb.append(keyOrder.toString()); } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/SubqueryFunctionNodeBase.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/SubqueryFunctionNodeBase.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/SubqueryFunctionNodeBase.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -32,6 +32,7 @@ import org.openrdf.model.URI; import com.bigdata.bop.BOp; +import com.bigdata.bop.IValueExpression; /** * A special function node for modeling value expression nodes which are @@ -119,4 +120,9 @@ } + @Override + protected void annotationValueToString(final StringBuilder sb, final BOp 
val, int i) { + sb.append(val.toString(i)); + } + } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/TermNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/TermNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/TermNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -88,5 +88,10 @@ return null; } + + @Override + public String toString(int i) { + return toShortString(); + } } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ValueExpressionNode.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ValueExpressionNode.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ValueExpressionNode.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -12,7 +12,7 @@ * @author <a href="mailto:tho...@us...">Bryan Thompson</a> * @version $Id$ */ -public class ValueExpressionNode extends ASTBase implements +public abstract class ValueExpressionNode extends ASTBase implements IValueExpressionNode { /** Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTUnionFiltersOptimizer.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTUnionFiltersOptimizer.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTUnionFiltersOptimizer.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -134,6 +134,8 @@ canOptimize = false; + break; + } else { union = (UnionNode) child; @@ -157,6 +159,8 @@ // something else in the group other than a union and filters canOptimize = false; + break; + } } Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/AbstractJoinGroupOptimizer.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/AbstractJoinGroupOptimizer.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/AbstractJoinGroupOptimizer.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -27,6 +27,8 @@ package com.bigdata.rdf.sparql.ast.optimizers; +import java.util.Iterator; + import com.bigdata.bop.BOp; import com.bigdata.bop.IBindingSet; import com.bigdata.bop.bindingSet.ListBindingSet; @@ -188,20 +190,7 @@ final IValueExpressionNode ve = filter.getValueExpressionNode(); - if (ve instanceof SubqueryFunctionNodeBase) { - - final SubqueryFunctionNodeBase subqueryFunction = (SubqueryFunctionNodeBase) ve; - - final GraphPatternGroup<IGroupMemberNode> graphPattern = subqueryFunction - .getGraphPattern(); - - if (graphPattern != null) { - - optimize(ctx, sa, bSets, graphPattern); - - } - - } + optimize(ctx, sa, bSets, ve); } else if (child instanceof ArbitraryLengthPathNode) { @@ -274,6 +263,35 @@ } } + + private void optimize(final AST2BOpContext ctx, final StaticAnalysis sa, + final IBindingSet[] bSets, final IValueExpressionNode ve) { + if (ve instanceof SubqueryFunctionNodeBase) { + + final SubqueryFunctionNodeBase 
subqueryFunction = (SubqueryFunctionNodeBase) ve; + + final GraphPatternGroup<IGroupMemberNode> graphPattern = subqueryFunction + .getGraphPattern(); + + if (graphPattern != null) { + + optimize(ctx, sa, bSets, graphPattern); + + } + + } else { + Iterator<BOp> it = ((BOp)ve).argIterator(); + while (it.hasNext()) { + + BOp b = it.next(); + if (b instanceof IValueExpressionNode) { + + optimize(ctx, sa, bSets, (IValueExpressionNode)b); + + } + } + } + } /** * Subclasses can do the work of optimizing a join group here. Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestAST.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestAST.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/TestAST.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -39,6 +39,7 @@ import com.bigdata.bop.Var; import com.bigdata.bop.ap.Predicate; import com.bigdata.journal.ITx; +import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.VTE; import com.bigdata.rdf.internal.constraints.CompareBOp; import com.bigdata.rdf.internal.constraints.IVValueExpression; @@ -262,7 +263,7 @@ groupBy.addExpr(new AssignmentNode(new VarNode("s"), new VarNode("s"))); final HavingNode havingBy = new HavingNode(); - havingBy.addExpr(new ValueExpressionNode(new CompareBOp(Var.var("x"), + havingBy.addExpr(new LegacyTestValueExpressionNode(new CompareBOp(Var.var("x"), Var.var("y"), CompareOp.GT))); final OrderByNode orderBy = new OrderByNode(); @@ -331,7 +332,7 @@ } public FilterNode filter(final int id) { - return new FilterNode(new ValueExpressionNode(new Filter(id))); + return new FilterNode(new LegacyTestValueExpressionNode(new Filter(id))); } public Predicate pred(final int id) { @@ -345,8 +346,23 @@ return new Filter(id); } - - private static final class Filter extends XSDBooleanIVValueExpression { + /** + * @deprecated This was just for compatibility with SOp2ASTUtility. It is + * only used by the test suite now. + */ + @Deprecated + private static final class LegacyTestValueExpressionNode extends ValueExpressionNode { + private LegacyTestValueExpressionNode(IValueExpression<? 
extends IV> ve) { + super(ve); + } + + @Override + public String toString(int i) { + return toShortString(); + } + } + + private static final class Filter extends XSDBooleanIVValueExpression { /** * Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/AbstractOptimizerTestCase.java =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/AbstractOptimizerTestCase.java 2014-05-07 13:29:39 UTC (rev 8214) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/optimizers/AbstractOptimizerTestCase.java 2014-05-07 13:57:17 UTC (rev 8215) @@ -23,25 +23,29 @@ */ package com.bigdata.rdf.sparql.ast.optimizers; -import java.util.HashMap; -import java.util.Map; import org.openrdf.model.impl.URIImpl; import org.openrdf.query.algebra.StatementPattern.Scope; import com.bigdata.bop.IBindingSet; +import com.bigdata.bop.IValueExpression; +import com.bigdata.bop.IVariable; import com.bigdata.bop.ModifiableBOpBase; +import com.bigdata.journal.ITx; import com.bigdata.rdf.internal.IV; import com.bigdata.rdf.internal.constraints.IsBoundBOp; +import com.bigdata.rdf.internal.constraints.OrBOp; import com.bigdata.rdf.sparql.ast.ASTBase; import com.bigdata.rdf.sparql.ast.ASTContainer; import com.bigdata.rdf.sparql.ast.AbstractASTEvaluationTestCase; import com.bigdata.rdf.sparql.ast.ArbitraryLengthPathNode; import com.bigdata.rdf.sparql.ast.AssignmentNode; import com.bigdata.rdf.sparql.ast.ConstantNode; +import com.bigdata.rdf.sparql.ast.ExistsNode; import com.bigdata.rdf.sparql.ast.FilterNode; import com.bigdata.rdf.sparql.ast.FunctionNode; import com.bigdata.rdf.sparql.ast.FunctionRegistry; +import com.bigdata.rdf.sparql.ast.GlobalAnnotations; import com.bigdata.rdf.sparql.ast.GraphPatternGroup; import com.bigdata.rdf.sparql.ast.GroupMemberNodeBase; import com.bigdata.rdf.sparql.ast.IGroupMemberNode; @@ -50,6 +54,7 @@ import com.bigdata.rdf.sparql.ast.JoinGroupNode; import com.bigdata.rdf.sparql.ast.NamedSubqueryInclude; import com.bigdata.rdf.sparql.ast.NamedSubqueryRoot; +import com.bigdata.rdf.sparql.ast.NotExistsNode; import com.bigdata.rdf.sparql.ast.PathNode; import com.bigdata.rdf.sparql.ast.ValueExpressionNode; import com.bigdata.rd... [truncated message content] |
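A large part of r8215 is the addition of an indent-aware toString(int) to the value expression AST nodes (IValueExpressionNode, FunctionNode, TermNode), so that nested FILTER expressions pretty-print by recursive descent instead of collapsing onto one line. The sketch below illustrates the general pattern with placeholder node types; it is not the actual com.bigdata.rdf.sparql.ast code, and the layout of the real output differs.

{{{
import java.util.Arrays;
import java.util.List;

// Minimal, self-contained sketch of indent-aware recursive pretty-printing.
// Node, Leaf and Func are placeholders, not the bigdata AST classes.
abstract class Node {

    // Each node renders itself at the given depth; children render at depth + 1.
    abstract String toString(int indent);

    static String pad(final int depth) {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < depth; i++)
            sb.append("  ");
        return sb.toString();
    }
}

final class Leaf extends Node {
    private final String label;
    Leaf(final String label) { this.label = label; }
    @Override
    String toString(final int indent) { return label; } // leaves print inline
}

final class Func extends Node {
    private final String name;
    private final List<Node> args;
    Func(final String name, final Node... args) {
        this.name = name;
        this.args = Arrays.asList(args);
    }
    @Override
    String toString(final int indent) {
        // Render each argument one level deeper, on its own line.
        final StringBuilder sb = new StringBuilder(name).append("(");
        for (final Node a : args) {
            sb.append("\n").append(pad(indent + 1));
            sb.append(a == null ? "<null>" : a.toString(indent + 1));
        }
        return sb.append("\n").append(pad(indent)).append(")").toString();
    }
}

public class PrettyPrintSketch {
    public static void main(final String[] ignored) {
        final Node filter = new Func("AND",
                new Func("GT", new Leaf("?x"), new Leaf("?y")),
                new Leaf("bound(?z)"));
        // A single entry point prints the whole expression tree.
        System.out.println("FILTER( " + filter.toString(0) + " )");
    }
}
}}}

The same revision also teaches AbstractJoinGroupOptimizer to recurse through the argument lists of value expression nodes, so EXISTS / NOT EXISTS graph patterns buried inside compound value expressions are visited by the group optimizers rather than only those sitting at the top level of a FILTER.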
From: <tho...@us...> - 2014-05-07 13:29:42
Revision: 8214 http://sourceforge.net/p/bigdata/code/8214 Author: thompsonbry Date: 2014-05-07 13:29:39 +0000 (Wed, 07 May 2014) Log Message: ----------- Added the context path to the URL presented in the HA version of the /status page for the remote server. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HAStatusServletUtil.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HAStatusServletUtil.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HAStatusServletUtil.java 2014-05-07 13:10:19 UTC (rev 8213) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HAStatusServletUtil.java 2014-05-07 13:29:39 UTC (rev 8214) @@ -42,6 +42,7 @@ import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooKeeper; +import com.bigdata.BigdataStatics; import com.bigdata.ha.HAGlue; import com.bigdata.ha.HAStatusEnum; import com.bigdata.ha.QuorumService; @@ -728,7 +729,13 @@ final int pipelineIndex = indexOf(serviceId, pipeline); - final String nssUrl = "http://" + hostname + ":" + nssPort; + /* + * TODO This assumes that the context path at the remote + * service is the same as the context path for the local + * service. + */ + final String nssUrl = "http://" + hostname + ":" + nssPort + + BigdataStatics.getContextPath(); // hyper link to NSS service. p.node("a").attr("href", nssUrl).text(nssUrl).close(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
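For clarity, the practical effect of r8214 is on the hyperlink rendered for each remote service on the HA /status page: the link now includes the servlet context path, so it resolves to the deployed webapp rather than the bare server root. The small sketch below is illustrative only; the host name and port are invented, and the context path is assumed to be the default "/bigdata" (at runtime it comes from BigdataStatics.getContextPath()).

{{{
public class NssUrlSketch {
    public static void main(final String[] args) {
        // Illustrative values only; the real ones come from the quorum metadata.
        final String hostname = "ha-node-2.example.com";
        final int nssPort = 8080;
        final String contextPath = "/bigdata"; // assumed default

        final String before = "http://" + hostname + ":" + nssPort;
        final String after = before + contextPath;

        System.out.println(before); // http://ha-node-2.example.com:8080       (pre-r8214 link)
        System.out.println(after);  // http://ha-node-2.example.com:8080/bigdata (post-r8214 link)
    }
}
}}}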
From: <tho...@us...> - 2014-05-07 13:10:23
Revision: 8213 http://sourceforge.net/p/bigdata/code/8213 Author: thompsonbry Date: 2014-05-07 13:10:19 +0000 (Wed, 07 May 2014) Log Message: ----------- Fixed the name of the log file for the A server (was HALog-B.txt). Not sure how that happened. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties 2014-05-07 13:09:19 UTC (rev 8212) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties 2014-05-07 13:10:19 UTC (rev 8213) @@ -16,10 +16,21 @@ #log4j.logger.com.bigdata.zookeeper.ZooHelper=ALL log4j.logger.com.bigdata.rdf.internal.LexiconConfiguration=FATAL +# webapp logging. +log4j.logger.com.bigdata.rdf.sail.webapp=ALL +#log4j.logger.com.bigdata.rdf.sail.webapp.RESTServlet=INFO +#log4j.logger.com.bigdata.rdf.sail.webapp.HALoadBalancerServlet=ALL +#log4j.logger.com.bigdata.ganglia.GangliaService=INFO + +# jetty debug logging. +log4j.logger.org.eclipse.jetty=INFO +#log4j.logger.org.eclipse.jetty.client=DEBUG +#log4j.logger.org.eclipse.jetty.proxy=DEBUG + log4j.appender.haLog=org.apache.log4j.FileAppender log4j.appender.haLog.Threshold=ALL # Note: path is relative to the directory in which the service starts. -log4j.appender.haLog.File=halog-B.txt +log4j.appender.haLog.File=halog-A.txt log4j.appender.haLog.Append=true log4j.appender.haLog.layout=org.apache.log4j.PatternLayout log4j.appender.haLog.layout.ConversionPattern=%-5p: %d{HH:mm:ss,SSS} %r %X{hostname} %X{serviceUUID} %X{taskname} %X{timestamp} %X{resources} %t %l: %m%n \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-07 13:09:24
Revision: 8212 http://sourceforge.net/p/bigdata/code/8212 Author: thompsonbry Date: 2014-05-07 13:09:19 +0000 (Wed, 07 May 2014) Log Message: ----------- HAJournalServer now understands the jetty.dump.start environment variable. Modified the HA CI test suite to pass through the jetty.dump.start environment variable if set in the environment that runs the test suite JVM. Rolled back a change to jetty.xml that is breaking the HA CI test server startup. I will have to take this up with the jetty folks. This change was based on a recommended simplification of jetty.xml. The exception from HA CI was: {{{ WARN : 07:59:54,422 1620 com.bigdata.journal.jini.ha.HAJournalServer org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:506): Failed startup of context o.e.j.w.WebAppContext@718acd64{/bigdata,null,null}{"."} java.io.FileNotFoundException: "." at org.eclipse.jetty.webapp.WebInfConfiguration.unpack(WebInfConfiguration.java:493) at org.eclipse.jetty.webapp.WebInfConfiguration.preConfigure(WebInfConfiguration.java:72) at org.eclipse.jetty.webapp.WebAppContext.preConfigure(WebAppContext.java:460) at org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:496) at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68) at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:125) at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:107) at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:60) at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68) at org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:125) at org.eclipse.jetty.server.Server.start(Server.java:358) at org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:107) at org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:60) at org.eclipse.jetty.server.Server.doStart(Server.java:325) at org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:68) at com.bigdata.journal.jini.ha.HAJournalServer.startNSS(HAJournalServer.java:4550) at com.bigdata.journal.jini.ha.HAJournalServer.startUpHook(HAJournalServer.java:883) at com.bigdata.journal.jini.ha.AbstractServer.run(AbstractServer.java:1881) at com.bigdata.journal.jini.ha.HAJournalServer.<init>(HAJournalServer.java:623) at com.bigdata.journal.jini.ha.HAJournalServer.main(HAJournalServer.java:4763) }}} Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java 2014-05-06 19:51:59 UTC (rev 8211) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HAJournalServer.java 2014-05-07 13:09:19 UTC (rev 8212) @@ -4549,6 +4549,13 @@ // Start the server. jettyServer.start(); + if (Boolean.getBoolean("jetty.dump.start")) { + + // Support the jetty dump-after-start semantics. + log.warn(jettyServer.dump()); + + } + /* * Report *an* effective URL of this service. 
* Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java 2014-05-06 19:51:59 UTC (rev 8211) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java 2014-05-07 13:09:19 UTC (rev 8212) @@ -2137,6 +2137,13 @@ private final String serviceName; private final UUID serviceId; private final int jettyPort; + /** + * The value of this environment variable is passed down. You can set + * this environment variable to force jetty to dump its internal start + * after start. + */ + private final boolean jettyDumpStart = Boolean + .getBoolean("jetty.dump.start"); // private final File serviceDir; private final String[] args; @@ -2230,6 +2237,12 @@ private final String JETTY_RESOURCE_BASE = "jetty.resourceBase"; /** + * Used to override the <code>jetty.dump.start</code> environment + * property. + */ + private final String TEST_JETTY_DUMP_START = "jetty.dump.start"; + + /** * The absolute effective path of the service directory. This is * overridden on the {@link #TEST_SERVICE_DIR} environment variable * and in the deployed HAJournal.config file in order to have the @@ -2277,6 +2290,9 @@ // Override the location of the webapp as deployed. cmds.add("-D" + JETTY_RESOURCE_BASE + "=\".\""); + // Override the jetty.dump.start. + cmds.add("-D" + TEST_JETTY_DUMP_START + "=" + jettyDumpStart); + super.addCommandArgs(cmds); for (String arg : args) { Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml 2014-05-06 19:51:59 UTC (rev 8211) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml 2014-05-07 13:09:19 UTC (rev 8212) @@ -122,15 +122,14 @@ <!-- =========================================================== --> <!-- Set handler Collection Structure --> <!-- =========================================================== --> + <!-- Recommended approach: does not work for HA CI test suite. <Set name="handler"> <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection"> <Set name="handlers"> <Array type="org.eclipse.jetty.server.Handler"> <Item> - <!-- This is the bigdata web application. --> <New id="WebAppContext" class="org.eclipse.jetty.webapp.WebAppContext"> <Set name="war"> - <!-- The location of the top-level of the bigdata webapp. --> <SystemProperty name="jetty.resourceBase" default="bigdata-war/src" /> </Set> <Set name="contextPath">/bigdata</Set> @@ -142,6 +141,46 @@ </Array> </Set> </New> + </Set> --> + <Set name="handler"> + <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection"> + <Set name="handlers"> + <Array type="org.eclipse.jetty.server.Handler"> + <Item> + <!-- This is the bigdata web application. --> + <New id="WebAppContext" class="org.eclipse.jetty.webapp.WebAppContext"> + <Set name="resourceBase"> + <!-- The location of the top-level of the bigdata webapp. 
--> + <SystemProperty name="jetty.resourceBase" default="bigdata-war/src" /> + </Set> + <Set name="contextPath">/bigdata</Set> + <Set name="descriptor">WEB-INF/web.xml</Set> + <Set name="parentLoaderPriority">true</Set> + <Set name="extractWAR">false</Set> + </New> + </Item> + <Item> + <!-- This appears to be necessary in addition to the above. --> + <!-- Without this, it will not resolve http://localhost:8080/ --> + <!-- and can fail to deliver some of the static content. --> + <New id="ResourceHandler" class="org.eclipse.jetty.server.handler.ResourceHandler"> + <Set name="resourceBase"> + <!-- The location of the top-level of the bigdata webapp. --> + <SystemProperty name="jetty.resourceBase" default="bigdata-war/src" /> + </Set> + <Set name="welcomeFiles"> + <Array type="java.lang.String"> + <Item>html/index.html</Item> + </Array> + </Set> + </New> + </Item> + <!-- <Item> + <New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"></New> + </Item> --> + </Array> + </Set> + </New> </Set> <!-- =========================================================== --> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
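The dump-after-start hook added in r8212 keys off a JVM system property read with Boolean.getBoolean (a system property, not an environment variable), so it is enabled by launching the server JVM with -Djetty.dump.start=true. Below is a minimal, self-contained sketch of the same pattern against a bare embedded Jetty Server; it is not the HAJournalServer startup code, and the port and handler wiring are placeholders.

{{{
import org.eclipse.jetty.server.Server;

public class DumpAfterStartSketch {

    public static void main(final String[] args) throws Exception {

        // Port chosen arbitrarily for the sketch; no handlers are registered.
        final Server server = new Server(8080);

        server.start();

        // Mirrors the r8212 hook: dump the jetty component tree only when the
        // JVM was launched with -Djetty.dump.start=true.
        if (Boolean.getBoolean("jetty.dump.start")) {

            System.err.println(server.dump());

        }

        server.join();

    }

}
}}}

Run as, for example: java -Djetty.dump.start=true DumpAfterStartSketch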
From: <tho...@us...> - 2014-05-06 19:52:06
Revision: 8211 http://sourceforge.net/p/bigdata/code/8211 Author: thompsonbry Date: 2014-05-06 19:51:59 +0000 (Tue, 06 May 2014) Log Message: ----------- Disabling the jetty-jmx configuration in jetty.xml. This is breaking CI. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml 2014-05-06 19:02:38 UTC (rev 8210) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml 2014-05-06 19:51:59 UTC (rev 8211) @@ -40,6 +40,7 @@ <!-- =========================================================== --> <!-- Initialize the Jetty MBean container --> <!-- =========================================================== --> + <!-- Note: This breaks CI if it is enabled <Call name="addBean"> <Arg> <New id="MBeanContainer" class="org.eclipse.jetty.jmx.MBeanContainer"> @@ -48,16 +49,16 @@ </Arg> </New> </Arg> - </Call> + </Call>--> - <!-- Add the static log to the MBean server. --> + <!-- Add the static log to the MBean server. <Call name="addBean"> <Arg> <New class="org.eclipse.jetty.util.log.Log" /> </Arg> - </Call> + </Call>--> - <!-- For remote MBean access (optional) --> + <!-- For remote MBean access (optional) <New id="ConnectorServer" class="org.eclipse.jetty.jmx.ConnectorServer"> <Arg> <New class="javax.management.remote.JMXServiceURL"> @@ -69,7 +70,7 @@ </Arg> <Arg>org.eclipse.jetty.jmx:name=rmiconnectorserver</Arg> <Call name="start" /> - </New> + </New>--> <!-- =========================================================== --> <!-- Http Configuration. --> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
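If the JMX instrumentation is still wanted while the jetty-jmx stanza in jetty.xml stays commented out, the equivalent registration can be done programmatically where the embedded Server is assembled. The sketch below is a hedged illustration of that alternative, not the HAJournalServer code, and whether it sidesteps the CI breakage would need to be verified.

{{{
import java.lang.management.ManagementFactory;

import org.eclipse.jetty.jmx.MBeanContainer;
import org.eclipse.jetty.server.Server;

public class JmxRegistrationSketch {

    public static void main(final String[] args) throws Exception {

        // Port is arbitrary for the sketch; no handlers are registered.
        final Server server = new Server(8080);

        // Programmatic equivalent of the commented-out <Call name="addBean">
        // block: expose jetty components through the platform MBeanServer.
        final MBeanContainer mbContainer = new MBeanContainer(
                ManagementFactory.getPlatformMBeanServer());

        server.addBean(mbContainer);

        server.start();
        server.join();

    }

}
}}}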
From: <dme...@us...> - 2014-05-06 19:02:41
Revision: 8210 http://sourceforge.net/p/bigdata/code/8210 Author: dmekonnen Date: 2014-05-06 19:02:38 +0000 (Tue, 06 May 2014) Log Message: ----------- Internal documentation text. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/bin/HARestore Modified: branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/bin/HARestore =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/bin/HARestore 2014-05-06 18:56:06 UTC (rev 8209) +++ branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/bin/HARestore 2014-05-06 19:02:38 UTC (rev 8210) @@ -1,5 +1,16 @@ #!/bin/bash +# +# This script has been developed for the "systap-aws-bigdata-ha" cluster deployment package. +# +# The HARestore script will recreate the Bigdata HA journal file from log and snapshot files. +# The intended use of the script is to restore a journal file that resides on an ephemeral +# storage media following a system reboot. The script should not be executed while Bigdata +# is running. +# +# HARestore takes no arguments and assumes the Bigdata journal filename convention: "bigdata-ha.jnl". +# + source /etc/default/bigdataHA SERVICE_DIR="$FED_DIR/$FEDNAME/$LOGICAL_SERVICE_ID/HAJournalServer" This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |