This list is closed, nobody may subscribe to it.
2010 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
(139) |
Aug
(94) |
Sep
(232) |
Oct
(143) |
Nov
(138) |
Dec
(55) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
2011 |
Jan
(127) |
Feb
(90) |
Mar
(101) |
Apr
(74) |
May
(148) |
Jun
(241) |
Jul
(169) |
Aug
(121) |
Sep
(157) |
Oct
(199) |
Nov
(281) |
Dec
(75) |
2012 |
Jan
(107) |
Feb
(122) |
Mar
(184) |
Apr
(73) |
May
(14) |
Jun
(49) |
Jul
(26) |
Aug
(103) |
Sep
(133) |
Oct
(61) |
Nov
(51) |
Dec
(55) |
2013 |
Jan
(59) |
Feb
(72) |
Mar
(99) |
Apr
(62) |
May
(92) |
Jun
(19) |
Jul
(31) |
Aug
(138) |
Sep
(47) |
Oct
(83) |
Nov
(95) |
Dec
(111) |
2014 |
Jan
(125) |
Feb
(60) |
Mar
(119) |
Apr
(136) |
May
(270) |
Jun
(83) |
Jul
(88) |
Aug
(30) |
Sep
(47) |
Oct
(27) |
Nov
(23) |
Dec
|
2015 |
Jan
|
Feb
|
Mar
|
Apr
|
May
|
Jun
|
Jul
|
Aug
|
Sep
(3) |
Oct
|
Nov
|
Dec
|
2016 |
Jan
|
Feb
|
Mar
(4) |
Apr
(1) |
May
|
Jun
|
Jul
|
Aug
|
Sep
|
Oct
|
Nov
|
Dec
|
From: <tho...@us...> - 2014-05-14 13:56:32
|
Revision: 8309 http://sourceforge.net/p/bigdata/code/8309 Author: thompsonbry Date: 2014-05-14 13:56:29 +0000 (Wed, 14 May 2014) Log Message: ----------- Changed the name of the environment variable to enable the built-in ganglia peer as a listener from GANGLIA_LISTENER => GANGLIA_LISTEN See #624 (HA LBS). Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/src/resources/HAJournal/HAJournal.config branches/BIGDATA_RELEASE_1_3_0/src/resources/bin/startHAServices branches/BIGDATA_RELEASE_1_3_0/src/resources/etc/default/bigdataHA Modified: branches/BIGDATA_RELEASE_1_3_0/src/resources/HAJournal/HAJournal.config =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/src/resources/HAJournal/HAJournal.config 2014-05-14 10:52:46 UTC (rev 8308) +++ branches/BIGDATA_RELEASE_1_3_0/src/resources/HAJournal/HAJournal.config 2014-05-14 13:56:29 UTC (rev 8309) @@ -355,15 +355,15 @@ // Platform and process performance counters. This requires external // software on some platforms (vmstat, pidstat, iostat, etc.). // - // This is necessary for the GangliaLBSPolicy. + // This is necessary for the GangliaLBSPolicy or CountersLBSPolicy. new NV(Journal.Options.COLLECT_PLATFORM_STATISTICS, - ConfigMath.getProperty("COLLECT_PLATFORM_STATISTICS","true")), + ConfigMath.getProperty("COLLECT_PLATFORM_STATISTICS","false")), // Use bigdata-ganglia module to build internal model of cluster load. // // This is required for the GangliaLBSPolicy. new NV(com.bigdata.journal.GangliaPlugIn.Options.GANGLIA_LISTEN, - ConfigMath.getProperty("GANGLIA_LISTENER","true")), + ConfigMath.getProperty("GANGLIA_LISTEN","false")), // Use bigdata-ganglia module to report service metrics to ganglia. // Modified: branches/BIGDATA_RELEASE_1_3_0/src/resources/bin/startHAServices =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/src/resources/bin/startHAServices 2014-05-14 10:52:46 UTC (rev 8308) +++ branches/BIGDATA_RELEASE_1_3_0/src/resources/bin/startHAServices 2014-05-14 13:56:29 UTC (rev 8309) @@ -89,6 +89,7 @@ -DCOLLECT_QUEUE_STATISTICS=${COLLECT_QUEUE_STATISTICS}\ -DCOLLECT_PLATFORM_STATISTICS=${COLLECT_PLATFORM_STATISTICS}\ -DGANGLIA_REPORT=${GANGLIA_REPORT}\ + -DGANGLIA_LISTEN=${GANGLIA_LISTEN}\ -DSYSSTAT_DIR=${SYSSTAT_DIR}\ -Dcom.bigdata.counters.linux.sysstat.path=${SYSSTAT_DIR}\ " Modified: branches/BIGDATA_RELEASE_1_3_0/src/resources/etc/default/bigdataHA =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/src/resources/etc/default/bigdataHA 2014-05-14 10:52:46 UTC (rev 8308) +++ branches/BIGDATA_RELEASE_1_3_0/src/resources/etc/default/bigdataHA 2014-05-14 13:56:29 UTC (rev 8309) @@ -46,5 +46,5 @@ #export COLLECT_QUEUE_STATISTICS= #export COLLECT_PLATFORM_STATISTICS= #export GANGLIA_REPORT= -#export GANGLIA_LISTENER= +#export GANGLIA_LISTEN= #export SYSSTAT_DIR= This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-14 10:52:49
|
Revision: 8308 http://sourceforge.net/p/bigdata/code/8308 Author: thompsonbry Date: 2014-05-14 10:52:46 +0000 (Wed, 14 May 2014) Log Message: ----------- Removed stderr log message from AALP. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ArbitraryLengthPathNode.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ArbitraryLengthPathNode.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ArbitraryLengthPathNode.java 2014-05-14 02:07:40 UTC (rev 8307) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/ArbitraryLengthPathNode.java 2014-05-14 10:52:46 UTC (rev 8308) @@ -258,7 +258,7 @@ zeroMatchAdjustment = Long.MAX_VALUE / 2; // The following is more accurate, but more expensive and unnecessary. // db.getURICount() + db.getBNodeCount(); - System.err.println("adj: "+zeroMatchAdjustment); +// System.err.println("adj: "+zeroMatchAdjustment); break; } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2014-05-14 02:07:45
|
Revision: 8307 http://sourceforge.net/p/bigdata/code/8307 Author: mrpersonick Date: 2014-05-14 02:07:40 +0000 (Wed, 14 May 2014) Log Message: ----------- fixing a ClassCastException in the Blueprints test suite Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-14 00:38:59 UTC (rev 8306) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-14 02:07:40 UTC (rev 8307) @@ -992,7 +992,6 @@ FEATURES.supportsLongProperty = true; FEATURES.supportsMapProperty = false; FEATURES.supportsStringProperty = true; - FEATURES.supportsDuplicateEdges = true; FEATURES.supportsSelfLoops = true; FEATURES.isPersistent = true; Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java 2014-05-14 00:38:59 UTC (rev 8306) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java 2014-05-14 02:07:40 UTC (rev 8307) @@ -25,6 +25,7 @@ import com.bigdata.rdf.sail.remote.BigdataSailRemoteRepository; import com.bigdata.rdf.sail.remote.BigdataSailRemoteRepositoryConnection; import com.bigdata.rdf.sail.webapp.client.RemoteRepository; +import com.tinkerpop.blueprints.Features; /** * This is a thin-client implementation of a Blueprints wrapper around the @@ -109,5 +110,51 @@ throw new RuntimeException(e); } } + + protected static final Features FEATURES = new Features(); + @Override + public Features getFeatures() { + + return FEATURES; + + } + + static { + + FEATURES.supportsSerializableObjectProperty = BigdataGraph.FEATURES.supportsSerializableObjectProperty; + FEATURES.supportsBooleanProperty = BigdataGraph.FEATURES.supportsBooleanProperty; + FEATURES.supportsDoubleProperty = BigdataGraph.FEATURES.supportsDoubleProperty; + FEATURES.supportsFloatProperty = BigdataGraph.FEATURES.supportsFloatProperty; + FEATURES.supportsIntegerProperty = BigdataGraph.FEATURES.supportsIntegerProperty; + FEATURES.supportsPrimitiveArrayProperty = BigdataGraph.FEATURES.supportsPrimitiveArrayProperty; + FEATURES.supportsUniformListProperty = BigdataGraph.FEATURES.supportsUniformListProperty; + FEATURES.supportsMixedListProperty = BigdataGraph.FEATURES.supportsMixedListProperty; + FEATURES.supportsLongProperty = BigdataGraph.FEATURES.supportsLongProperty; + FEATURES.supportsMapProperty = BigdataGraph.FEATURES.supportsMapProperty; + FEATURES.supportsStringProperty = BigdataGraph.FEATURES.supportsStringProperty; + FEATURES.supportsDuplicateEdges = BigdataGraph.FEATURES.supportsDuplicateEdges; + FEATURES.supportsSelfLoops = BigdataGraph.FEATURES.supportsSelfLoops; + FEATURES.isPersistent = BigdataGraph.FEATURES.isPersistent; + FEATURES.isWrapper = BigdataGraph.FEATURES.isWrapper; + FEATURES.supportsVertexIteration = BigdataGraph.FEATURES.supportsVertexIteration; + FEATURES.supportsEdgeIteration = BigdataGraph.FEATURES.supportsEdgeIteration; + FEATURES.supportsVertexIndex = BigdataGraph.FEATURES.supportsVertexIndex; + FEATURES.supportsEdgeIndex = BigdataGraph.FEATURES.supportsEdgeIndex; + FEATURES.ignoresSuppliedIds = BigdataGraph.FEATURES.ignoresSuppliedIds; +// FEATURES.supportsTransactions = BigdataGraph.FEATURES.supportsTransactions; + FEATURES.supportsIndices = BigdataGraph.FEATURES.supportsIndices; + FEATURES.supportsKeyIndices = BigdataGraph.FEATURES.supportsKeyIndices; + FEATURES.supportsVertexKeyIndex = BigdataGraph.FEATURES.supportsVertexKeyIndex; + FEATURES.supportsEdgeKeyIndex = BigdataGraph.FEATURES.supportsEdgeKeyIndex; + FEATURES.supportsEdgeRetrieval = BigdataGraph.FEATURES.supportsEdgeRetrieval; + FEATURES.supportsVertexProperties = BigdataGraph.FEATURES.supportsVertexProperties; + FEATURES.supportsEdgeProperties = BigdataGraph.FEATURES.supportsEdgeProperties; + FEATURES.supportsThreadedTransactions = BigdataGraph.FEATURES.supportsThreadedTransactions; + + // override + FEATURES.supportsTransactions = false; //BigdataGraph.FEATURES.supportsTransactions; + + } + } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java 2014-05-14 00:38:59 UTC (rev 8306) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java 2014-05-14 02:07:40 UTC (rev 8307) @@ -22,22 +22,12 @@ */ package com.bigdata.blueprints; -import java.util.List; -import java.util.Set; - -import org.openrdf.model.Literal; -import org.openrdf.model.URI; -import org.openrdf.query.GraphQueryResult; import org.openrdf.repository.RepositoryConnection; import com.bigdata.rdf.sail.BigdataSail; import com.bigdata.rdf.sail.BigdataSailRepository; -import com.bigdata.rdf.sail.BigdataSailRepositoryConnection; -import com.tinkerpop.blueprints.Edge; import com.tinkerpop.blueprints.Features; -import com.tinkerpop.blueprints.GraphQuery; import com.tinkerpop.blueprints.TransactionalGraph; -import com.tinkerpop.blueprints.Vertex; /** * This is the most basic possible implementation of the Blueprints Graph API. @@ -159,11 +149,51 @@ } - static { + protected static final Features FEATURES = new Features(); - FEATURES.supportsTransactions = true; + @Override + public Features getFeatures() { + + return FEATURES; } + + static { + + FEATURES.supportsSerializableObjectProperty = BigdataGraph.FEATURES.supportsSerializableObjectProperty; + FEATURES.supportsBooleanProperty = BigdataGraph.FEATURES.supportsBooleanProperty; + FEATURES.supportsDoubleProperty = BigdataGraph.FEATURES.supportsDoubleProperty; + FEATURES.supportsFloatProperty = BigdataGraph.FEATURES.supportsFloatProperty; + FEATURES.supportsIntegerProperty = BigdataGraph.FEATURES.supportsIntegerProperty; + FEATURES.supportsPrimitiveArrayProperty = BigdataGraph.FEATURES.supportsPrimitiveArrayProperty; + FEATURES.supportsUniformListProperty = BigdataGraph.FEATURES.supportsUniformListProperty; + FEATURES.supportsMixedListProperty = BigdataGraph.FEATURES.supportsMixedListProperty; + FEATURES.supportsLongProperty = BigdataGraph.FEATURES.supportsLongProperty; + FEATURES.supportsMapProperty = BigdataGraph.FEATURES.supportsMapProperty; + FEATURES.supportsStringProperty = BigdataGraph.FEATURES.supportsStringProperty; + FEATURES.supportsDuplicateEdges = BigdataGraph.FEATURES.supportsDuplicateEdges; + FEATURES.supportsSelfLoops = BigdataGraph.FEATURES.supportsSelfLoops; + FEATURES.isPersistent = BigdataGraph.FEATURES.isPersistent; + FEATURES.isWrapper = BigdataGraph.FEATURES.isWrapper; + FEATURES.supportsVertexIteration = BigdataGraph.FEATURES.supportsVertexIteration; + FEATURES.supportsEdgeIteration = BigdataGraph.FEATURES.supportsEdgeIteration; + FEATURES.supportsVertexIndex = BigdataGraph.FEATURES.supportsVertexIndex; + FEATURES.supportsEdgeIndex = BigdataGraph.FEATURES.supportsEdgeIndex; + FEATURES.ignoresSuppliedIds = BigdataGraph.FEATURES.ignoresSuppliedIds; +// FEATURES.supportsTransactions = BigdataGraph.FEATURES.supportsTransactions; + FEATURES.supportsIndices = BigdataGraph.FEATURES.supportsIndices; + FEATURES.supportsKeyIndices = BigdataGraph.FEATURES.supportsKeyIndices; + FEATURES.supportsVertexKeyIndex = BigdataGraph.FEATURES.supportsVertexKeyIndex; + FEATURES.supportsEdgeKeyIndex = BigdataGraph.FEATURES.supportsEdgeKeyIndex; + FEATURES.supportsEdgeRetrieval = BigdataGraph.FEATURES.supportsEdgeRetrieval; + FEATURES.supportsVertexProperties = BigdataGraph.FEATURES.supportsVertexProperties; + FEATURES.supportsEdgeProperties = BigdataGraph.FEATURES.supportsEdgeProperties; + FEATURES.supportsThreadedTransactions = BigdataGraph.FEATURES.supportsThreadedTransactions; + + // override + FEATURES.supportsTransactions = true; //BigdataGraph.FEATURES.supportsTransactions; + + } // @Override This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-14 00:39:02
|
Revision: 8306 http://sourceforge.net/p/bigdata/code/8306 Author: thompsonbry Date: 2014-05-14 00:38:59 +0000 (Wed, 14 May 2014) Log Message: ----------- Modified test to use unique URLs in an attempt to cure a CI failure in TestServiceRegistry. I am hypothesizing a side-effect through the static ServiceRegistery instance with some other tests. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/service/TestServiceRegistry.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/service/TestServiceRegistry.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/service/TestServiceRegistry.java 2014-05-14 00:32:00 UTC (rev 8305) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/service/TestServiceRegistry.java 2014-05-14 00:38:59 UTC (rev 8306) @@ -24,6 +24,7 @@ package com.bigdata.rdf.sparql.ast.eval.service; import java.util.Iterator; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import org.apache.http.conn.ClientConnectionManager; @@ -78,9 +79,12 @@ */ public void test_addGetRemove() { - final URI serviceURI1 = new URIImpl("http://www.bigdata.com/myService"); + final URI serviceURI1 = new URIImpl("http://www.bigdata.com/myService/" + + getName() + "/" + UUID.randomUUID()); - final URI serviceURI2 = new URIImpl("http://www.bigdata.com/myService2"); + final URI serviceURI2 = new URIImpl( + "http://www.bigdata.com/myService2/" + getName() + "/" + + UUID.randomUUID()); final RemoteServiceOptions options = new RemoteServiceOptions(); @@ -122,9 +126,17 @@ */ public void test_serviceAlias() { - final URI serviceURI1 = new URIImpl("http://www.bigdata.com/myService"); + /* + * Note: Avoid possible side-effects from uncleared service registries + * using a UUID to make these URIs distinct from others that might be in + * the ServiceRegistry. + */ + final URI serviceURI1 = new URIImpl("http://www.bigdata.com/myService/" + + getName() + "/" + UUID.randomUUID()); - final URI serviceURI2 = new URIImpl("http://www.bigdata.com/myService2"); + final URI serviceURI2 = new URIImpl( + "http://www.bigdata.com/myService2/" + getName() + "/" + + UUID.randomUUID()); final RemoteServiceOptions options = new RemoteServiceOptions(); @@ -232,10 +244,12 @@ final BigdataValueFactory f = store.getValueFactory(); final BigdataURI serviceURI1 = f - .createURI("http://www.bigdata.com/myService"); + .createURI("http://www.bigdata.com/myService/" + getName() + + "/" + UUID.randomUUID()); final BigdataURI serviceURI2 = f - .createURI("http://www.bigdata.com/myService2"); + .createURI("http://www.bigdata.com/myService2/" + getName() + + "/" + UUID.randomUUID()); final BigdataValue[] values = new BigdataValue[] { // serviceURI1,// @@ -320,7 +334,8 @@ */ public void test_customService() throws SailException { - final URI serviceURI1 = new URIImpl("http://www.bigdata.com/myService"); + final URI serviceURI1 = new URIImpl("http://www.bigdata.com/myService/" + + getName() + "/" + UUID.randomUUID()); final MyCustomServiceFactory serviceFactory = new MyCustomServiceFactory( new OpenrdfNativeServiceOptions()); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-14 00:32:05
|
Revision: 8305 http://sourceforge.net/p/bigdata/code/8305 Author: thompsonbry Date: 2014-05-14 00:32:00 +0000 (Wed, 14 May 2014) Log Message: ----------- Made the DEFAULT instance "final". Already private and static. Chasing odd test failure in CI {{{ junit.framework.AssertionFailedError at com.bigdata.rdf.sparql.ast.eval.service.TestServiceRegistry.test_serviceAlias(TestServiceRegistry.java:145) }}} Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/service/ServiceRegistry.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/service/ServiceRegistry.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/service/ServiceRegistry.java 2014-05-13 23:59:41 UTC (rev 8304) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/service/ServiceRegistry.java 2014-05-14 00:32:00 UTC (rev 8305) @@ -37,7 +37,7 @@ /** * TODO Allow SPI pattern for override? */ - private static ServiceRegistry DEFAULT = new ServiceRegistry(); + private static final ServiceRegistry DEFAULT = new ServiceRegistry(); static public ServiceRegistry getInstance() { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 23:59:44
|
Revision: 8304 http://sourceforge.net/p/bigdata/code/8304 Author: dmekonnen Date: 2014-05-13 23:59:41 +0000 (Tue, 13 May 2014) Log Message: ----------- minor version bump to sync with opscode update for documentation Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb 2014-05-13 23:37:01 UTC (rev 8303) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb 2014-05-13 23:59:41 UTC (rev 8304) @@ -4,7 +4,7 @@ license 'GNU GPLv2' description 'Installs/Configures the Bigdata triple/graph store.' long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) -version '0.1.1' +version '0.1.2' depends 'apt' depends 'java', '>= 1.22.0' depends 'ant' This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 23:37:04
|
Revision: 8303 http://sourceforge.net/p/bigdata/code/8303 Author: dmekonnen Date: 2014-05-13 23:37:01 +0000 (Tue, 13 May 2014) Log Message: ----------- documentation format tweak. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 23:29:16 UTC (rev 8302) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 23:37:01 UTC (rev 8303) @@ -19,123 +19,69 @@ ### General Attributes -`node['bigdata'][:home]` - - The root directory for bigdata contents -`/var/lib/bigdata` +`node['bigdata'][:home]` - The root directory for bigdata contents (Default: `/var/lib/bigdata`) -`node['bigdata'][:url]` - - Where to download the bigdata package file from - (Defaults: Tomcat: http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war / NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz) +`node['bigdata'][:url]` - Where to download the bigdata package file from. (Defaults: Tomcat: http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war / NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz) `node['bigdata'][:data_dir]` - Where the bigdata.jnl resides. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">Common Startup Problmems</a> (Defaults: Tomcat: `node['bigdata'][:home]`/data / NSS: `node['bigdata'][:home]`/var/data) -`node['bigdata'][:log_dir]` - - Where bigdata log files should reside (i.e. queryLog.csv, rules.log, queryRunStateLog.csv). - (Default: Tomcat: `node['bigdata'][:home]`/var/log / NSS: `node['bigdata'][:home]`/var/log) +`node['bigdata'][:log_dir]` - Where bigdata log files should reside (i.e. queryLog.csv, rules.log, queryRunStateLog.csv). (Default: Tomcat: `node['bigdata'][:home]`/var/log / NSS: `node['bigdata'][:home]`/var/log) -`node['bigdata'][:properties]` - - File path to the Bigdata properties file. - (Default: `node['bigdata'][:home]`/RWStore.properties) +`node['bigdata'][:properties]` - File path to the Bigdata properties file. (Default: `node['bigdata'][:home]`/RWStore.properties) -`node['bigdata'][:svn_branch]` - - The Subversion branch to retrieve source files from. - (Default: Tomcat: https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA\_RELEASE\_1\_3\_0 / NSS: https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT\_BRANCH\_1\_3\_1) +`node['bigdata'][:svn_branch]` - The Subversion branch to retrieve source files from. (Default: Tomcat: https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA\_RELEASE\_1\_3\_0 / NSS: https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT\_BRANCH\_1\_3\_1) -`node['bigdata'][:source]` - - The directory to retrieve Subversion contents into. - (Default: bigdata-code) +`node['bigdata'][:source]` - The directory to retrieve Subversion contents into. (Default: bigdata-code) -`node['bigdata']['journal.AbstractJournal.bufferMode']` - - Journal Buffer Mode - (Default: DiskRW) +`node['bigdata']['journal.AbstractJournal.bufferMode']` - Journal Buffer Mode (Default: DiskRW) -`node['bigdata']['service.AbstractTransactionService.minReleaseAge']` - - Minimum Release Age - (Default: 1) +`node['bigdata']['service.AbstractTransactionService.minReleaseAge']` - Minimum Release Age (Default: 1) -`node['bigdata']['btree.writeRetentionQueue.capacity']` - - Writing retention queue length. - (Default: 4000) +`node['bigdata']['btree.writeRetentionQueue.capacity']` - Writing retention queue length. (Default: 4000) -`node['bigdata']['btree.BTree.branchingFactor']` - - Branching factor for the journal's B-Tree. - (Default: 128) +`node['bigdata']['btree.BTree.branchingFactor']` - Branching factor for the journal's B-Tree. (Default: 128) -`node['bigdata']['journal.AbstractJournal.initialExtent']` - - Journal's initial extent - (Default: 209715200) +`node['bigdata']['journal.AbstractJournal.initialExtent']` - Journal's initial extent (Default: 209715200) -`node['bigdata']['journal.AbstractJournal.maximumExtent']` - - Journal's maximum extent - (Default: 209715200) +`node['bigdata']['journal.AbstractJournal.maximumExtent']` - Journal's maximum extent (Default: 209715200) -`node['bigdata']['rdf.sail.truthMaintenance']` - - Switch Truth Maintenance on/off. - (Default: false) +`node['bigdata']['rdf.sail.truthMaintenance']` - Switch Truth Maintenance on/off. (Default: false) -`node['bigdata']['rdf.store.AbstractTripleStore.quads']` - - Switch Quads Mode on/off. - (Default: false) +`node['bigdata']['rdf.store.AbstractTripleStore.quads']` - Switch Quads Mode on/off. (Default: false) -`node['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']` - - Switch statement identifiers on/off. - (Default: false) +`node['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']` - Switch statement identifiers on/off. (Default: false) -`node['bigdata']['rdf.store.AbstractTripleStore.textIndex']` - - Switch text indexing on/off. - (Default: false) +`node['bigdata']['rdf.store.AbstractTripleStore.textIndex']` - Switch text indexing on/off. (Default: false) -`node['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']` - - The class to handle RDF axioms. - (Default: com.bigdata.rdf.axioms.NoAxioms) +`node['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']` - The class to handle RDF axioms. (Default: com.bigdata.rdf.axioms.NoAxioms) -`node['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']` - - Branching factor for the journal's Lexical B-Tree. - (Default:- 400) +`node['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']` - Branching factor for the journal's Lexical B-Tree. (Default:- 400) -`node['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']` - - Branching factor for the journal's SPO B-Tree. - (Default: 1024) +`node['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']` - Branching factor for the journal's SPO B-Tree. (Default: 1024) -`node['bigdata']['rdf.sail.bufferCapacity']` - - The number of statements to buffer before committing triples to the persistence layer. - (Default: 100000) +`node['bigdata']['rdf.sail.bufferCapacity']` - The number of statements to buffer before committing triples to the persistence layer. (Default: 100000) ### Attributes for Tomcat Based Install -`node['bigdata'][:web_home]` - - The web application root directory for bigdata. - (Default `node['tomcat'][:webapp_dir]`/bigdata) +`node['bigdata'][:web_home]` - The web application root directory for bigdata. (Default `node['tomcat'][:webapp_dir]`/bigdata) -`node['bigdata'][:log4j_properties]` - - File path to the log4j properties file. - (Default `node['bigdata'][:web_home]`/WEB-INF/classes/log4j.properties) +`node['bigdata'][:log4j_properties]` - File path to the log4j properties file. (Default `node['bigdata'][:web_home]`/WEB-INF/classes/log4j.properties) ### Attributes for NanoSparqlServer (NSS) Based Install -`node['bigdata'][:user]` - - The user to install and run bigdata under. - (Default: `bigdata`) +`node['bigdata'][:user]` - The user to install and run bigdata under. (Default: `bigdata`) -`node['bigdata'][:group]` - - The group to install and run bigdata under. - (Default: `bigdata`) +`node['bigdata'][:group]` - The group to install and run bigdata under. (Default: `bigdata`) -`node['bigdata'][:jetty_dir]` - - The Jetty root directory. - (Default: `node['bigdata'][:home]`/var/jetty) +`node['bigdata'][:jetty_dir]` - The Jetty root directory. (Default: `node['bigdata'][:home]`/var/jetty) ### Attributes for MapGraph -`node['mapgraph'][:svn_branch]` - - The Subversion branch to retrieve source files from. - (Default: https://svn.code.sf.net/p/mpgraph/code/trunk) +`node['mapgraph'][:svn_branch]` - The Subversion branch to retrieve source files from. (Default: https://svn.code.sf.net/p/mpgraph/code/trunk) -`node['mapgraph'][:source]` - - The directory to retrieve Subversion contents into. - (Default: mapgraph-code ) +`node['mapgraph'][:source]` - The directory to retrieve Subversion contents into. (Default: mapgraph-code ) Recipes @@ -219,8 +165,6 @@ ] - - ### Trouble Shooting The Bigdta cookbook recipes have been tested thoroughly in the Vagrant context with VirtualBox and AWS providers using Ubuntu 12.04 and Oracle's JDK 7. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 23:29:19
|
Revision: 8302 http://sourceforge.net/p/bigdata/code/8302 Author: dmekonnen Date: 2014-05-13 23:29:16 +0000 (Tue, 13 May 2014) Log Message: ----------- documentation format tweak. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 23:08:07 UTC (rev 8301) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 23:29:16 UTC (rev 8302) @@ -18,185 +18,126 @@ ---------- ### General Attributes -<table> - <tr> - <th>Attribute</th> - <th>Description</th> - <th>Default</th> - </tr> - <tr> - <td><tt>node['bigdata'][:home]</tt></td> - <td>The root directory for bigdata contents</td> - <td><tt>/var/lib/bigdata</tt></td> - </tr> - <tr> - <td><tt>node['bigdata'][:url]</tt></td> - <td>Where to download the bigdata package file from</td> - <td>Tomcat: http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war<br/>NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz</td> - </tr> - <tr> - <td><tt>node['bigdata'][:data_dir]</tt></td> - <td>Where the bigdata.jnl resides. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">Common Startup Problmems</a></td> - <td>Tomcat: node['bigdata'][:home]/data<br/>NSS: node['bigdata'][:home]/var/data</td> - </tr> - <tr> - <td><tt>node['bigdata'][:log_dir]</tt></td> - <td>Where bigdata log files should reside (i.e. queryLog.csv, rules.log, queryRunStateLog.csv).</td> - <td>Tomcat: node['bigdata'][:home]/var/log<br/>NSS: node['bigdata'][:home]/var/log</td> - </tr> - <tr> - <td><tt>node['bigdata'][:properties]</tt></td> - <td>File path to the Bigdata properties file.</td> - <td>node['bigdata'][:home]/RWStore.properties</td> - </tr> - <tr> - <td><tt>node['bigdata'][:svn_branch]</tt></td> - <td>The Subversion branch to retrieve source files from.</td> - <td>Tomcat: https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0<br/>NSS: https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1</td> - </tr> - <tr> - <td><tt>node['bigdata'][:source]</tt></td> - <td>The directory to retrieve Subversion contents into.</td> - <td>bigdata-code</td> - </tr> - <tr> - <td><tt>node['bigdata']['journal.AbstractJournal.bufferMode']</tt></td> - <td>Journal Buffer Mode</td> - <td>DiskRW</td> - </tr> - <tr> - <td><tt>node['bigdata']['service.AbstractTransactionService.minReleaseAge']</tt></td> - <td>Minimum Release Age</td> - <td>1</td> - </tr> - <tr> - <td><tt>node['bigdata']['btree.writeRetentionQueue.capacity']</tt></td> - <td>Writing retention queue length.</td> - <td>4000</td> - </tr> - <tr> - <td><tt>node['bigdata']['btree.BTree.branchingFactor']</tt></td> - <td>Branching factor for the journal's B-Tree.</td> - <td>128</td> - </tr> - <tr> - <td><tt>node['bigdata']['journal.AbstractJournal.initialExtent']</tt></td> - <td>Journal's initial extent</td> - <td>209715200</td> - </tr> - <tr> - <td><tt>node['bigdata']['journal.AbstractJournal.maximumExtent']</tt></td> - <td>Journal's maximum extent</td> - <td>209715200</td> - </tr> - <tr> - <td><tt>node['bigdata']['rdf.sail.truthMaintenance']</tt></td> - <td>Switch Truth Maintenance on/off.</td> - <td>false</td> - </tr> - <tr> - <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.quads']</tt></td> - <td>Switch Quads Mode on/off.</td> - <td>false</td> - </tr> - <tr> - <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']</tt></td> - <td>Switch statement identifiers on/off.</td> - <td>false</td> - </tr> - <tr> - <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.textIndex']</tt></td> - <td>Switch text indexing on/off.</td> - <td>false</td> - </tr> - <tr> - <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']</tt></td> - <td>The class to handle RDF axioms.</td> - <td>com.bigdata.rdf.axioms.NoAxioms</td> - </tr> - <tr> - <td><tt>node['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']</tt></td> - <td>Branching factor for the journal's Lexical B-Tree.</td> - <td>400</td> - </tr> - <tr> - <td><tt>node['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']</tt></td> - <td>Branching factor for the journal's SPO B-Tree.</td> - <td>1024</td> - </tr> - <tr> - <td><tt>node['bigdata']['rdf.sail.bufferCapacity']</tt></td> - <td>The number of statements to buffer before committing triples to the persistence layer.</td> - <td>100000</td> - </tr> -</table> +`node['bigdata'][:home]` + - The root directory for bigdata contents +`/var/lib/bigdata` + +`node['bigdata'][:url]` + - Where to download the bigdata package file from + (Defaults: Tomcat: http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war / NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz) + +`node['bigdata'][:data_dir]` + - Where the bigdata.jnl resides. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">Common Startup Problmems</a> + (Defaults: Tomcat: `node['bigdata'][:home]`/data / NSS: `node['bigdata'][:home]`/var/data) + +`node['bigdata'][:log_dir]` + - Where bigdata log files should reside (i.e. queryLog.csv, rules.log, queryRunStateLog.csv). + (Default: Tomcat: `node['bigdata'][:home]`/var/log / NSS: `node['bigdata'][:home]`/var/log) + +`node['bigdata'][:properties]` + - File path to the Bigdata properties file. + (Default: `node['bigdata'][:home]`/RWStore.properties) + +`node['bigdata'][:svn_branch]` + - The Subversion branch to retrieve source files from. + (Default: Tomcat: https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA\_RELEASE\_1\_3\_0 / NSS: https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT\_BRANCH\_1\_3\_1) + +`node['bigdata'][:source]` + - The directory to retrieve Subversion contents into. + (Default: bigdata-code) + +`node['bigdata']['journal.AbstractJournal.bufferMode']` + - Journal Buffer Mode + (Default: DiskRW) + +`node['bigdata']['service.AbstractTransactionService.minReleaseAge']` + - Minimum Release Age + (Default: 1) + +`node['bigdata']['btree.writeRetentionQueue.capacity']` + - Writing retention queue length. + (Default: 4000) + +`node['bigdata']['btree.BTree.branchingFactor']` + - Branching factor for the journal's B-Tree. + (Default: 128) + +`node['bigdata']['journal.AbstractJournal.initialExtent']` + - Journal's initial extent + (Default: 209715200) + +`node['bigdata']['journal.AbstractJournal.maximumExtent']` + - Journal's maximum extent + (Default: 209715200) + +`node['bigdata']['rdf.sail.truthMaintenance']` + - Switch Truth Maintenance on/off. + (Default: false) + +`node['bigdata']['rdf.store.AbstractTripleStore.quads']` + - Switch Quads Mode on/off. + (Default: false) + +`node['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']` + - Switch statement identifiers on/off. + (Default: false) + +`node['bigdata']['rdf.store.AbstractTripleStore.textIndex']` + - Switch text indexing on/off. + (Default: false) + +`node['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']` + - The class to handle RDF axioms. + (Default: com.bigdata.rdf.axioms.NoAxioms) + +`node['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']` + - Branching factor for the journal's Lexical B-Tree. + (Default:- 400) + +`node['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']` + - Branching factor for the journal's SPO B-Tree. + (Default: 1024) + +`node['bigdata']['rdf.sail.bufferCapacity']` + - The number of statements to buffer before committing triples to the persistence layer. + (Default: 100000) + ### Attributes for Tomcat Based Install -<table> - <tr> - <th>Attribute</th> - <th>Description</th> - <th>Default</th> - </tr> - <tr> - <td><tt>node['bigdata'][:web_home]</tt></td> - <td>The web application root directory for bigdata.</td> - <td>node['tomcat'][:webapp_dir]/bigdata</td> - </tr> - <tr> - <td><tt>node['bigdata'][:log4j_properties]</tt></td> - <td>File path to the log4j properties file.</td> - <td>node['bigdata'][:web_home]/WEB-INF/classes/log4j.properties</td> - </tr> -</table> +`node['bigdata'][:web_home]` + - The web application root directory for bigdata. + (Default `node['tomcat'][:webapp_dir]`/bigdata) +`node['bigdata'][:log4j_properties]` + - File path to the log4j properties file. + (Default `node['bigdata'][:web_home]`/WEB-INF/classes/log4j.properties) + ### Attributes for NanoSparqlServer (NSS) Based Install -<table> - <tr> - <th>Attribute</th> - <th>Description</th> - <th>Default</th> - </tr> - <tr> - <td><tt>node['bigdata'][:user]</tt></td> - <td>The user to install and run bigdata under.</td> - <td>bigdata</td> - </tr> - <tr> - <td><tt>node['bigdata'][:group]</tt></td> - <td>The group to install and run bigdata under.</td> - <td>bigdata</td> - </tr> - <tr> - <td><tt>node['bigdata'][:jetty_dir]</tt></td> - <td>The Jetty root directory.</td> - <td>node['bigdata'][:home]/var/jetty</td> - </tr> -</table> +`node['bigdata'][:user]` + - The user to install and run bigdata under. + (Default: `bigdata`) +`node['bigdata'][:group]` + - The group to install and run bigdata under. + (Default: `bigdata`) + +`node['bigdata'][:jetty_dir]` + - The Jetty root directory. + (Default: `node['bigdata'][:home]`/var/jetty) + ### Attributes for MapGraph -<table> - <tr> - <th>Attribute</th> - <th>Description</th> - <th>Default</th> - </tr> - <tr> - <td><tt>node['mapgraph'][:svn_branch]</tt></td> - <td>The Subversion branch to retrieve source files from.</td> - <td>https://svn.code.sf.net/p/mpgraph/code/trunk</td> - </tr> - <tr> - <td><tt>node['mapgraph'][:source]</tt></td> - <td>The directory to retrieve Subversion contents into.</td> - <td>mapgraph-code</td> - </tr> -</table> +`node['mapgraph'][:svn_branch]` + - The Subversion branch to retrieve source files from. + (Default: https://svn.code.sf.net/p/mpgraph/code/trunk) +`node['mapgraph'][:source]` + - The directory to retrieve Subversion contents into. + (Default: mapgraph-code ) + Recipes ------- @@ -293,8 +234,9 @@ License and Authors ------------------- -Author:: Daniel Mekonnen [daniel<no-spam-at>systap.com] +Author:: Daniel Mekonnen [daniel<o-spam-at>systap.com] -~~~ + GNU GPLv2 - This pakcage may be resiributed under the same terms and conditions as the Bigdata project that it is a part of. -~~~ + + http://www.gnu.org/licenses/gpl-2.0.html This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 23:08:11
|
Revision: 8301 http://sourceforge.net/p/bigdata/code/8301 Author: dmekonnen Date: 2014-05-13 23:08:07 +0000 (Tue, 13 May 2014) Log Message: ----------- documentation format tweak. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 20:01:22 UTC (rev 8300) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 23:08:07 UTC (rev 8301) @@ -232,7 +232,7 @@ #### Tomcat Example -``` + chef.json = { :bigdata => { :install_type => "tomcat", @@ -256,11 +256,10 @@ ] -``` #### NSS Example -``` + chef.json = { :bigdata => { :install_type => "nss" @@ -279,7 +278,6 @@ ] -``` ### Trouble Shooting @@ -297,6 +295,6 @@ ------------------- Author:: Daniel Mekonnen [daniel<no-spam-at>systap.com] -``` +~~~ GNU GPLv2 - This pakcage may be resiributed under the same terms and conditions as the Bigdata project that it is a part of. -``` +~~~ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 20:01:26
|
Revision: 8300 http://sourceforge.net/p/bigdata/code/8300 Author: dmekonnen Date: 2014-05-13 20:01:22 +0000 (Tue, 13 May 2014) Log Message: ----------- documentation updates. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 19:45:03 UTC (rev 8299) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 20:01:22 UTC (rev 8300) @@ -1,6 +1,6 @@ Bigdata Cookbook ================ -The Bigdata cookbook provides the [bigdata v1.3.0](http://www.bigdata.com/bigdata/blog/) opensource triplestore/graph database. The cookbook provides recipes to install the Bigdata server as a web application under Tomcat, with its own embedded Jetty server (NSS - the NanoSparqlServer). The recipes will install pre-configured packages by default and optionally may build and install the server directly from source archive. +The Bigdata cookbook provides the [bigdata v1.3.0](http://www.bigdata.com/bigdata/blog/) opensource triplestore/graph database. The cookbook provides recipes to install the Bigdata server as a web application under Tomcat, with its own embedded Jetty server (NSS - the NanoSparqlServer). The recipes will install pre-configured packages by node and optionally may build and install the server directly from source archive. For more info on Bigdata please visit: @@ -25,120 +25,110 @@ <th>Default</th> </tr> <tr> - <td><tt>default['bigdata'][:home]</tt></td> - <td>Where the extracted bigdata.war contents are found</td> - <td>Default: <tt>/var/lib/bigdata</tt></td> + <td><tt>node['bigdata'][:home]</tt></td> + <td>The root directory for bigdata contents</td> + <td><tt>/var/lib/bigdata</tt></td> </tr> <tr> - <td><tt>default['bigdata'][:url]</tt></td> + <td><tt>node['bigdata'][:url]</tt></td> <td>Where to download the bigdata package file from</td> - <td>Tomcat: http://sourceforge.net/projects/bigdata/files/bigdata/1.3.0/bigdata.war/download<br/>NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz</td> + <td>Tomcat: http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war<br/>NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz</td> </tr> <tr> - <td><tt>default['bigdata'][:data_dir]</tt></td> - <td>Where the bigdata.jni should reside. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">"Common Startup Problmems</a></td> - <td>default['bigdata'][:home]/data<br/>default['bigdata'][:home]/var/data</td> + <td><tt>node['bigdata'][:data_dir]</tt></td> + <td>Where the bigdata.jnl resides. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">Common Startup Problmems</a></td> + <td>Tomcat: node['bigdata'][:home]/data<br/>NSS: node['bigdata'][:home]/var/data</td> </tr> <tr> - <td><tt>default['bigdata'][:log_dir]</tt></td> + <td><tt>node['bigdata'][:log_dir]</tt></td> <td>Where bigdata log files should reside (i.e. queryLog.csv, rules.log, queryRunStateLog.csv).</td> - <td>Tomcat: default['bigdata'][:home]/var/log<br/>NSS: default['bigdata'][:home]/var/log</td> + <td>Tomcat: node['bigdata'][:home]/var/log<br/>NSS: node['bigdata'][:home]/var/log</td> </tr> <tr> - <td><tt>default['bigdata'][:properties]</tt></td> - <td></td> - <td>default['bigdata'][:home]/RWStore.properties</td> + <td><tt>node['bigdata'][:properties]</tt></td> + <td>File path to the Bigdata properties file.</td> + <td>node['bigdata'][:home]/RWStore.properties</td> </tr> <tr> - <td><tt>default['bigdata'][:svn_branch]</tt></td> - <td></td> - <td>https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1</td> + <td><tt>node['bigdata'][:svn_branch]</tt></td> + <td>The Subversion branch to retrieve source files from.</td> + <td>Tomcat: https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0<br/>NSS: https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1</td> </tr> <tr> - <td><tt>default['bigdata'][:source]</tt></td> - <td>The directory to retrieve Subversion contents into</td> + <td><tt>node['bigdata'][:source]</tt></td> + <td>The directory to retrieve Subversion contents into.</td> <td>bigdata-code</td> </tr> <tr> - <td><tt>default['bigdata']['journal.AbstractJournal.bufferMode']</tt></td> - <td></td> + <td><tt>node['bigdata']['journal.AbstractJournal.bufferMode']</tt></td> + <td>Journal Buffer Mode</td> <td>DiskRW</td> </tr> <tr> - <td><tt>default['bigdata']['service.AbstractTransactionService.minReleaseAge']</tt></td> - <td></td> + <td><tt>node['bigdata']['service.AbstractTransactionService.minReleaseAge']</tt></td> + <td>Minimum Release Age</td> <td>1</td> </tr> <tr> - <td><tt>default['bigdata']['btree.writeRetentionQueue.capacity']</tt></td> - <td></td> + <td><tt>node['bigdata']['btree.writeRetentionQueue.capacity']</tt></td> + <td>Writing retention queue length.</td> <td>4000</td> </tr> <tr> - <td><tt>default['bigdata']['btree.BTree.branchingFactor']</tt></td> - <td></td> + <td><tt>node['bigdata']['btree.BTree.branchingFactor']</tt></td> + <td>Branching factor for the journal's B-Tree.</td> <td>128</td> </tr> <tr> - <td><tt>default['bigdata']['journal.AbstractJournal.initialExtent']</tt></td> - <td></td> + <td><tt>node['bigdata']['journal.AbstractJournal.initialExtent']</tt></td> + <td>Journal's initial extent</td> <td>209715200</td> </tr> <tr> - <td><tt>default['bigdata']['journal.AbstractJournal.maximumExtent']</tt></td> - <td></td> + <td><tt>node['bigdata']['journal.AbstractJournal.maximumExtent']</tt></td> + <td>Journal's maximum extent</td> <td>209715200</td> </tr> <tr> - <td><tt>default['bigdata']['rdf.sail.truthMaintenance']</tt></td> - <td></td> + <td><tt>node['bigdata']['rdf.sail.truthMaintenance']</tt></td> + <td>Switch Truth Maintenance on/off.</td> <td>false</td> </tr> <tr> - <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.quads']</tt></td> - <td></td> + <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.quads']</tt></td> + <td>Switch Quads Mode on/off.</td> <td>false</td> </tr> <tr> - <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']</tt></td> - <td></td> + <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']</tt></td> + <td>Switch statement identifiers on/off.</td> <td>false</td> </tr> <tr> - <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.textIndex']</tt></td> - <td></td> + <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.textIndex']</tt></td> + <td>Switch text indexing on/off.</td> <td>false</td> </tr> <tr> - <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']</tt></td> - <td></td> + <td><tt>node['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']</tt></td> + <td>The class to handle RDF axioms.</td> <td>com.bigdata.rdf.axioms.NoAxioms</td> </tr> <tr> - <td><tt>default['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']</tt></td> - <td></td> + <td><tt>node['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']</tt></td> + <td>Branching factor for the journal's Lexical B-Tree.</td> <td>400</td> </tr> <tr> - <td><tt>default['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']</tt></td> - <td></td> + <td><tt>node['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']</tt></td> + <td>Branching factor for the journal's SPO B-Tree.</td> <td>1024</td> </tr> <tr> - <td><tt>default['bigdata']['rdf.sail.bufferCapacity']</tt></td> - <td>The number of statements to buffer before committing to the persistence layer.</td> + <td><tt>node['bigdata']['rdf.sail.bufferCapacity']</tt></td> + <td>The number of statements to buffer before committing triples to the persistence layer.</td> <td>100000</td> </tr> - <tr> - <td><tt>default['mapgraph'][:source]</tt></td> - <td></td> - <td>mapgraph-code</td> - </tr> - <tr> - <td><tt>default['mapgraph'][:svn_branch]</tt></td> - <td></td> - <td>https://svn.code.sf.net/p/mpgraph/code/trunk</td> - </tr> </table> ### Attributes for Tomcat Based Install @@ -150,14 +140,14 @@ <th>Default</th> </tr> <tr> - <td><tt>default['bigdata'][:web_home]</tt></td> - <td></td> - <td>default['tomcat'][:webapp_dir]/bigdata</td> + <td><tt>node['bigdata'][:web_home]</tt></td> + <td>The web application root directory for bigdata.</td> + <td>node['tomcat'][:webapp_dir]/bigdata</td> </tr> <tr> - <td><tt>default['bigdata'][:log4j_properties]</tt></td> - <td></td> - <td>Tomcat:default['bigdata'][:web_home]/WEB-INF/classes/log4j.properties</td> + <td><tt>node['bigdata'][:log4j_properties]</tt></td> + <td>File path to the log4j properties file.</td> + <td>node['bigdata'][:web_home]/WEB-INF/classes/log4j.properties</td> </tr> </table> @@ -170,19 +160,19 @@ <th>Default</th> </tr> <tr> - <td><tt>default['bigdata'][:user]</tt></td> - <td></td> + <td><tt>node['bigdata'][:user]</tt></td> + <td>The user to install and run bigdata under.</td> <td>bigdata</td> </tr> <tr> - <td><tt>default['bigdata'][:group]</tt></td> - <td></td> + <td><tt>node['bigdata'][:group]</tt></td> + <td>The group to install and run bigdata under.</td> <td>bigdata</td> </tr> <tr> - <td><tt>default['bigdata'][:jetty_dir]</tt></td> - <td></td> - <td>default['bigdata'][:home]/var/jetty</td> + <td><tt>node['bigdata'][:jetty_dir]</tt></td> + <td>The Jetty root directory.</td> + <td>node['bigdata'][:home]/var/jetty</td> </tr> </table> @@ -195,14 +185,14 @@ <th>Default</th> </tr> <tr> - <td><tt>default['mapgraph'][:source]</tt></td> - <td></td> - <td>mapgraph-code</td> + <td><tt>node['mapgraph'][:svn_branch]</tt></td> + <td>The Subversion branch to retrieve source files from.</td> + <td>https://svn.code.sf.net/p/mpgraph/code/trunk</td> </tr> <tr> - <td><tt>default['mapgraph'][:svn_branch]</tt></td> - <td></td> - <td>https://svn.code.sf.net/p/mpgraph/code/trunk</td> + <td><tt>node['mapgraph'][:source]</tt></td> + <td>The directory to retrieve Subversion contents into.</td> + <td>mapgraph-code</td> </tr> </table> @@ -210,11 +200,11 @@ Recipes ------- -A default recipe is not provided by the Bigdata cookbook. The user is given the option to install the Bigdata server under Tomcat or as a Jetty application. Under both options, Bigdata may optinally be built directly from the a Subversion source code branch. +A node recipe is not provided by the Bigdata cookbook. The user is given the option to install the Bigdata server under Tomcat or as a Jetty application. Under both options, Bigdata may optinally be built directly from the a Subversion source code branch. ### tomcat -Installs the [Tomcat](http://tomcat.apache.org/) server and then bigdata as a web application. Bigdata will be configured according to the attributes. If no attributes are given, Bigdata will be installed with the systems defaults. +Installs the [Tomcat](http://tomcat.apache.org/) server and then bigdata as a web application. Bigdata will be configured according to the attributes. If no attributes are given, Bigdata will be installed with the systems nodes. If the `build_from_svn` attribute is set to `true` Bigdata will be build from the Subversion repository given in the `svn_branch` attribute. @@ -246,7 +236,8 @@ chef.json = { :bigdata => { :install_type => "tomcat", - :build_from_svn => true + :build_from_svn => true, + :svn_branch => "https://svn.code.sf.net/p/bigdata/code/branches/BTREE_BUFFER_BRANCH/" }, :java => { "install_flavor" => "oracle", Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb 2014-05-13 19:45:03 UTC (rev 8299) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb 2014-05-13 20:01:22 UTC (rev 8300) @@ -1,8 +1,8 @@ name 'bigdata' maintainer 'Daniel Mekonnen' maintainer_email 'daniel<no-spam-at>systap.com' -license 'All rights reserved' -description 'Installs/Configures bigdata' +license 'GNU GPLv2' +description 'Installs/Configures the Bigdata triple/graph store.' long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) version '0.1.1' depends 'apt' This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-13 19:45:07
|
Revision: 8299 http://sourceforge.net/p/bigdata/code/8299 Author: thompsonbry Date: 2014-05-13 19:45:03 +0000 (Tue, 13 May 2014) Log Message: ----------- Commit: possible fix for test failure in test_webapp_structure_counters. {{{ java.io.UnsupportedEncodingException: 'UTF-8' at sun.nio.cs.StreamDecoder.forInputStreamReader(StreamDecoder.java:71) at java.io.InputStreamReader.<init>(InputStreamReader.java:100) at org.apache.http.util.EntityUtils.toString(EntityUtils.java:195) at org.apache.http.util.EntityUtils.toString(EntityUtils.java:221) at com.bigdata.rdf.sail.webapp.TestNanoSparqlClient.doGET(TestNanoSparqlClient.java:254) at com.bigdata.rdf.sail.webapp.TestNanoSparqlClient.test_webapp_structure_counters(TestNanoSparqlClient.java:210) }}} Changes are to CountersServlet (change in how the encoding is set on the http response) and TestNanoSparqlClient (we were getting the HttpEntity twice - it is doubtless a cached object reference so I doubt this is the issue). Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/CountersServlet.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlClient.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/CountersServlet.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/CountersServlet.java 2014-05-13 19:32:10 UTC (rev 8298) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/CountersServlet.java 2014-05-13 19:45:03 UTC (rev 8299) @@ -217,8 +217,15 @@ resp.setStatus(HTTP_OK); - resp.setContentType(mimeType + "; charset='" + charset + "'"); + resp.setContentType(mimeType); + if (format.hasCharset()) { + + // Note: Binary encodings do not specify charset. + resp.setCharacterEncoding(format.getCharset().name()); + + } + /* * Sets the cache behavior -- the data should be good for up to 60 * seconds unless you change the query parameters. These cache control Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlClient.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlClient.java 2014-05-13 19:32:10 UTC (rev 8298) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlClient.java 2014-05-13 19:45:03 UTC (rev 8299) @@ -251,7 +251,7 @@ entity = response.getEntity(); - final String content = EntityUtils.toString(response.getEntity()); + final String content = EntityUtils.toString(entity); return content; @@ -260,6 +260,7 @@ try { EntityUtils.consume(entity); } catch (IOException ex) { + log.warn(ex, ex); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2014-05-13 19:32:16
|
Revision: 8298 http://sourceforge.net/p/bigdata/code/8298 Author: mrpersonick Date: 2014-05-13 19:32:10 +0000 (Tue, 13 May 2014) Log Message: ----------- full blueprints integration commit Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphEmbedded.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphQuery.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataPredicate.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataRDFFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataVertex.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BlueprintsRDFFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/AbstractTestBigdataGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/TestAll.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/client/BigdataSailNSSWrapper.java branches/BIGDATA_RELEASE_1_3_0/build.xml Removed Paths: ------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataRDFFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataVertex.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BlueprintsRDFFactory.java branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/edge.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/edges.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/edgesByProperty.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/vertex.rq branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphClient.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/BigdataSailFactory.java branches/BIGDATA_RELEASE_1_3_0/build.xml Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-13 18:15:26 UTC (rev 8297) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-13 19:32:10 UTC (rev 8298) @@ -1,107 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.blueprints; - -import java.util.Arrays; -import java.util.List; - -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDFS; - -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Vertex; - -/** - * Edge implementation that wraps an Edge statement and points to a - * {@link BigdataGraph} instance. - * - * @author mikepersonick - * - */ -public class BigdataEdge extends BigdataElement implements Edge { - - private static final List<String> blacklist = Arrays.asList(new String[] { - "id", "", "label" - }); - - protected final Statement stmt; - - public BigdataEdge(final Statement stmt, final BigdataGraph graph) { - super(stmt.getPredicate(), graph); - - this.stmt = stmt; - } - - @Override - public Object getId() { - return graph.factory.fromEdgeURI(uri); - } - - @Override - public void remove() { - graph.removeEdge(this); - } - - @Override - public String getLabel() { - return (String) graph.getProperty(uri, RDFS.LABEL); - } - - @Override - public Vertex getVertex(final Direction dir) throws IllegalArgumentException { - - if (dir == Direction.BOTH) { - throw new IllegalArgumentException(); - } - - final URI uri = (URI) - (dir == Direction.OUT ? stmt.getSubject() : stmt.getObject()); - - final String id = graph.factory.fromVertexURI(uri); - - return graph.getVertex(id); - - } - - @Override - public void setProperty(final String property, final Object val) { - - if (property == null || blacklist.contains(property)) { - throw new IllegalArgumentException(); - } - - super.setProperty(property, val); - - } - - @Override - public String toString() { - final URI s = (URI) stmt.getSubject(); - final URI p = (URI) stmt.getPredicate(); - final URI o = (URI) stmt.getObject(); - return "e["+p.getLocalName()+"]["+s.getLocalName()+"->"+o.getLocalName()+"]"; - } - -} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-13 19:32:10 UTC (rev 8298) @@ -0,0 +1,115 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.Arrays; +import java.util.List; + +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.vocabulary.RDFS; + +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Vertex; + +/** + * Edge implementation that wraps an Edge statement and points to a + * {@link BigdataGraph} instance. + * + * @author mikepersonick + * + */ +public class BigdataEdge extends BigdataElement implements Edge { + + private static final List<String> blacklist = Arrays.asList(new String[] { + "id", "", "label" + }); + + protected final Statement stmt; + + public BigdataEdge(final Statement stmt, final BigdataGraph graph) { + super(stmt.getPredicate(), graph); + + this.stmt = stmt; + } + + @Override + public Object getId() { + + return graph.factory.fromEdgeURI(uri); + + } + + @Override + public void remove() { + + graph.removeEdge(this); + + } + + @Override + public String getLabel() { + + return (String) graph.getProperty(uri, RDFS.LABEL); + + } + + @Override + public Vertex getVertex(final Direction dir) throws IllegalArgumentException { + + if (dir == Direction.BOTH) { + throw new IllegalArgumentException(); + } + + final URI uri = (URI) + (dir == Direction.OUT ? stmt.getSubject() : stmt.getObject()); + + final String id = graph.factory.fromVertexURI(uri); + + return graph.getVertex(id); + + } + + @Override + public void setProperty(final String prop, final Object val) { + + if (prop == null || blacklist.contains(prop)) { + throw new IllegalArgumentException(); + } + + super.setProperty(prop, val); + + } + + @Override + public String toString() { + + final URI s = (URI) stmt.getSubject(); + final URI p = (URI) stmt.getPredicate(); + final URI o = (URI) stmt.getObject(); + return "e["+p.getLocalName()+"]["+s.getLocalName()+"->"+o.getLocalName()+"]"; + + } + +} Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-13 18:15:26 UTC (rev 8297) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-13 19:32:10 UTC (rev 8298) @@ -1,134 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.blueprints; - -import java.util.Arrays; -import java.util.List; -import java.util.Set; - -import org.openrdf.model.Literal; -import org.openrdf.model.URI; - -import com.tinkerpop.blueprints.Element; - -/** - * Base class for {@link BigdataVertex} and {@link BigdataEdge}. Handles - * property-related methods. - * - * @author mikepersonick - * - */ -public abstract class BigdataElement implements Element { - - private static final List<String> blacklist = Arrays.asList(new String[] { - "id", "" - }); - - protected final URI uri; - protected final BigdataGraph graph; - - public BigdataElement(final URI uri, final BigdataGraph graph) { - this.uri = uri; - this.graph = graph; - } - - @Override - @SuppressWarnings("unchecked") - public <T> T getProperty(final String property) { - - final URI p = graph.factory.toPropertyURI(property); - - return (T) graph.getProperty(uri, p); - - } - - @Override - public Set<String> getPropertyKeys() { - - return graph.getPropertyKeys(uri); - - } - - @Override - @SuppressWarnings("unchecked") - public <T> T removeProperty(final String property) { - - final URI p = graph.factory.toPropertyURI(property); - - return (T) graph.removeProperty(uri, p); - - } - - @Override - public void setProperty(final String property, final Object val) { - - if (property == null || blacklist.contains(property)) { - throw new IllegalArgumentException(); - } - - final URI p = graph.factory.toPropertyURI(property); - - final Literal o = graph.factory.toLiteral(val); - - graph.setProperty(uri, p, o); - - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((graph == null) ? 0 : graph.hashCode()); - result = prime * result + ((uri == null) ? 0 : uri.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - BigdataElement other = (BigdataElement) obj; - if (graph == null) { - if (other.graph != null) - return false; - } else if (!graph.equals(other.graph)) - return false; - if (uri == null) { - if (other.uri != null) - return false; - } else if (!uri.equals(other.uri)) - return false; - return true; - } - - @Override - public String toString() { - return uri.toString(); - } - - -} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-13 19:32:10 UTC (rev 8298) @@ -0,0 +1,154 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.openrdf.model.URI; + +import com.tinkerpop.blueprints.Element; + +/** + * Base class for {@link BigdataVertex} and {@link BigdataEdge}. Handles + * property-related methods. + * + * @author mikepersonick + * + */ +public abstract class BigdataElement implements Element { + + private static final List<String> blacklist = Arrays.asList(new String[] { + "id", "" + }); + + protected final URI uri; + protected final BigdataGraph graph; + + public BigdataElement(final URI uri, final BigdataGraph graph) { + this.uri = uri; + this.graph = graph; + } + + @Override + @SuppressWarnings("unchecked") + public <T> T getProperty(final String property) { + + return (T) graph.getProperty(uri, property); + + } + + @Override + public Set<String> getPropertyKeys() { + + return graph.getPropertyKeys(uri); + + } + + @Override + @SuppressWarnings("unchecked") + public <T> T removeProperty(final String property) { + + return (T) graph.removeProperty(uri, property); + + } + + @Override + public void setProperty(final String prop, final Object val) { + + if (prop == null || blacklist.contains(prop)) { + throw new IllegalArgumentException(); + } + + graph.setProperty(uri, prop, val); + + } + + /** + * Simple extension for multi-valued properties. + */ + public void addProperty(final String prop, final Object val) { + + if (prop == null || blacklist.contains(prop)) { + throw new IllegalArgumentException(); + } + + graph.addProperty(uri, prop, val); + + } + + /** + * Simple extension for multi-valued properties. + */ + @SuppressWarnings("unchecked") + public <T> List<T> getProperties(final String property) { + + return (List<T>) graph.getProperties(uri, property); + + } + + /** + * Generated code. + */ + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((graph == null) ? 0 : graph.hashCode()); + result = prime * result + ((uri == null) ? 0 : uri.hashCode()); + return result; + } + + /** + * Generated code. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + BigdataElement other = (BigdataElement) obj; + if (graph == null) { + if (other.graph != null) + return false; + } else if (!graph.equals(other.graph)) + return false; + if (uri == null) { + if (other.uri != null) + return false; + } else if (!uri.equals(other.uri)) + return false; + return true; + } + + @Override + public String toString() { + return uri.toString(); + } + + +} Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-13 18:15:26 UTC (rev 8297) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-13 19:32:10 UTC (rev 8298) @@ -1,851 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.blueprints; - -import info.aduna.iteration.CloseableIteration; - -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.UUID; - -import org.apache.commons.io.IOUtils; -import org.openrdf.OpenRDFException; -import org.openrdf.model.Literal; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.GraphQueryResult; -import org.openrdf.query.QueryLanguage; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryResult; - -import com.bigdata.rdf.store.BD; -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Features; -import com.tinkerpop.blueprints.Graph; -import com.tinkerpop.blueprints.GraphQuery; -import com.tinkerpop.blueprints.Vertex; -import com.tinkerpop.blueprints.util.DefaultGraphQuery; -import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; - -/** - * A base class for a Blueprints wrapper around a bigdata back-end. - * - * @author mikepersonick - * - */ -public abstract class BigdataGraph implements Graph { - - public static final URI VERTEX = new URIImpl(BD.NAMESPACE + "Vertex"); - - public static final URI EDGE = new URIImpl(BD.NAMESPACE + "Edge"); - -// final BigdataSailRepository repo; -// -// transient BigdataSailRepositoryConnection cxn; - - final BlueprintsRDFFactory factory; - -// public BigdataGraph(final BigdataSailRepository repo) { -// this(repo, BigdataRDFFactory.INSTANCE); -// } - - public BigdataGraph(//final BigdataSailRepository repo, - final BlueprintsRDFFactory factory) { -// try { -// this.repo = repo; -// this.cxn = repo.getUnisolatedConnection(); -// this.cxn.setAutoCommit(false); - this.factory = factory; -// } catch (RepositoryException ex) { -// throw new RuntimeException(ex); -// } - } - - public String toString() { - return getClass().getSimpleName().toLowerCase(); - } - - /** - * Post a GraphML file to the remote server. (Bulk-upload operation.) - */ - public void loadGraphML(final String file) throws Exception { - GraphMLReader.inputGraph(this, file); - } - - protected abstract RepositoryConnection cxn() throws Exception; - -// public BigdataSailRepositoryConnection getConnection() { -// return this.cxn; -// } -// -// public BlueprintsRDFFactory getFactory() { -// return this.factory; -// } - -// public Value getValue(final URI s, final URI p) { -// -// try { -// -// final RepositoryResult<Statement> result = -// cxn.getStatements(s, p, null, false); -// -// if (result.hasNext()) { -// -// final Value o = result.next().getObject(); -// -// if (result.hasNext()) { -// throw new RuntimeException(s -// + ": more than one value for p: " + p -// + ", did you mean to call getValues()?"); -// } -// -// return o; -// -// } -// -// return null; -// -// } catch (Exception ex) { -// throw new RuntimeException(ex); -// } -// -// } - - public Object getProperty(final URI s, final URI p) { - - try { - - final RepositoryResult<Statement> result = - cxn().getStatements(s, p, null, false); - - if (result.hasNext()) { - - final Value value = result.next().getObject(); - - if (result.hasNext()) { - throw new RuntimeException(s - + ": more than one value for p: " + p - + ", did you mean to call getValues()?"); - } - - if (!(value instanceof Literal)) { - throw new RuntimeException("not a property: " + value); - } - - final Literal lit = (Literal) value; - - return factory.fromLiteral(lit); - - } - - return null; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - -// public List<Value> getValues(final URI s, final URI p) { -// -// try { -// -// final RepositoryResult<Statement> result = -// cxn().getStatements(s, p, null, false); -// -// final List<Value> values = new LinkedList<Value>(); -// -// while (result.hasNext()) { -// -// final Value o = result.next().getObject(); -// -// values.add(o); -// -// } -// -// return values; -// -// } catch (Exception ex) { -// throw new RuntimeException(ex); -// } -// -// } - - public List<Object> getProperties(final URI s, final URI p) { - - try { - - final RepositoryResult<Statement> result = - cxn().getStatements(s, p, null, false); - - final List<Object> props = new LinkedList<Object>(); - - while (result.hasNext()) { - - final Value value = result.next().getObject(); - - if (!(value instanceof Literal)) { - throw new RuntimeException("not a property: " + value); - } - - final Literal lit = (Literal) value; - - props.add(factory.fromLiteral(lit)); - - } - - return props; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public Set<String> getPropertyKeys(final URI s) { - - try { - - final RepositoryResult<Statement> result = - cxn().getStatements(s, null, null, false); - - final Set<String> properties = new LinkedHashSet<String>(); - - while (result.hasNext()) { - - final Statement stmt = result.next(); - - if (!(stmt.getObject() instanceof Literal)) { - continue; - } - - if (stmt.getPredicate().equals(RDFS.LABEL)) { - continue; - } - - final String p = - factory.fromPropertyURI(stmt.getPredicate()); - - properties.add(p); - - } - - return properties; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public Object removeProperty(final URI s, final URI p) { - - try { - - final Object oldVal = getProperty(s, p); - - cxn().remove(s, p, null); - - return oldVal; - - } catch (Exception e) { - throw new RuntimeException(e); - } - - } - - public void setProperty(final URI s, final URI p, final Literal o) { - - try { - - cxn().remove(s, p, null); - - cxn().add(s, p, o); - - } catch (Exception e) { - throw new RuntimeException(e); - } - - } - - @Override - public Edge addEdge(final Object key, final Vertex from, final Vertex to, - final String label) { - - if (label == null) { - throw new IllegalArgumentException(); - } - - final String eid = key != null ? key.toString() : UUID.randomUUID().toString(); - - final URI edgeURI = factory.toEdgeURI(eid); - - if (key != null) { - - final Edge edge = getEdge(key); - - if (edge != null) { - if (!(edge.getVertex(Direction.OUT).equals(from) && - (edge.getVertex(Direction.OUT).equals(to)))) { - throw new IllegalArgumentException("edge already exists: " + key); - } - } - - } - - try { - -// if (cxn().hasStatement(edgeURI, RDF.TYPE, EDGE, false)) { -// throw new IllegalArgumentException("edge " + eid + " already exists"); -// } - - final URI fromURI = factory.toVertexURI(from.getId().toString()); - final URI toURI = factory.toVertexURI(to.getId().toString()); - - cxn().add(fromURI, edgeURI, toURI); - cxn().add(edgeURI, RDF.TYPE, EDGE); - cxn().add(edgeURI, RDFS.LABEL, factory.toLiteral(label)); - - return new BigdataEdge(new StatementImpl(fromURI, edgeURI, toURI), this); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Vertex addVertex(final Object key) { - - try { - - final String vid = key != null ? - key.toString() : UUID.randomUUID().toString(); - - final URI uri = factory.toVertexURI(vid); - -// if (cxn().hasStatement(vertexURI, RDF.TYPE, VERTEX, false)) { -// throw new IllegalArgumentException("vertex " + vid + " already exists"); -// } - - cxn().add(uri, RDF.TYPE, VERTEX); - - return new BigdataVertex(uri, this); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Edge getEdge(final Object key) { - - if (key == null) - throw new IllegalArgumentException(); - - try { - - final URI edge = factory.toEdgeURI(key.toString()); - - final RepositoryResult<Statement> result = - cxn().getStatements(null, edge, null, false); - - if (result.hasNext()) { - - final Statement stmt = result.next(); - - if (result.hasNext()) { - throw new RuntimeException( - "duplicate edge: " + key); - } - - return new BigdataEdge(stmt, this); - - } - - return null; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Iterable<Edge> getEdges() { - - final URI wild = null; - return getEdges(wild, wild); - - } - - public Iterable<Edge> getEdges(final URI s, final URI o, final String... labels) { - - try { - -// final RepositoryResult<Statement> result = -// cxn().getStatements(s, p, o, false); -// -// return new EdgeIterable(result); - - final StringBuilder sb = new StringBuilder(); - sb.append("construct { ?from ?edge ?to . } where {\n"); - sb.append("?edge rdf:type bd:Edge . ?from ?edge ?to .\n"); - if (labels != null && labels.length > 0) { - if (labels.length == 1) { - sb.append("?edge rdfs:label \"").append(labels[0]).append("\" .\n"); - } else { - sb.append("?edge rdfs:label ?label .\n"); - sb.append("filter(?label in ("); - for (String label : labels) { - sb.append("\""+label+"\", "); - } - sb.setLength(sb.length()-2); - sb.append(")) .\n"); - } - } - sb.append("}"); - - final String queryStr = sb.toString() - .replace("?from", s != null ? "<"+s+">" : "?from") - .replace("?to", o != null ? "<"+o+">" : "?to"); - - final org.openrdf.query.GraphQuery query = - cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - - final GraphQueryResult stmts = query.evaluate(); - - return new EdgeIterable(stmts); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public Iterable<Vertex> getVertices(final URI s, final URI o, - final String... labels) { - - if (s != null && o != null) { - throw new IllegalArgumentException(); - } - - if (s == null && o == null) { - throw new IllegalArgumentException(); - } - - try { - -// final RepositoryResult<Statement> result = -// cxn().getStatements(s, null, o, false); -// -// return new VertexIterable(result, s == null); - - final StringBuilder sb = new StringBuilder(); - sb.append("construct { ?from ?edge ?to . } where {\n"); - sb.append("?edge rdf:type bd:Edge . ?from ?edge ?to .\n"); - if (labels != null && labels.length > 0) { - if (labels.length == 1) { - sb.append("?edge rdfs:label \"").append(labels[0]).append("\" .\n"); - } else { - sb.append("?edge rdfs:label ?label .\n"); - sb.append("filter(?label in ("); - for (String label : labels) { - sb.append("\""+label+"\", "); - } - sb.setLength(sb.length()-2); - sb.append(")) .\n"); - } - } - sb.append("}"); - - final String queryStr = sb.toString() - .replace("?from", s != null ? "<"+s+">" : "?from") - .replace("?to", o != null ? "<"+o+">" : "?to"); - - final org.openrdf.query.GraphQuery query = - cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - - final GraphQueryResult stmts = query.evaluate(); - - return new VertexIterable(stmts, s == null); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public final <T> Iterable<T> fuse(final Iterable<T>... args) { - - return new FusedIterable<T>(args); - } - - - @Override - public Iterable<Edge> getEdges(final String prop, final Object val) { - - final URI p = factory.toPropertyURI(prop); - final Literal o = factory.toLiteral(val); - - try { - - final String queryStr = IOUtils.toString( - getClass().getResourceAsStream("edgesByProperty.rq")) - .replace("?prop", "<"+p+">") - .replace("?val", o.toString()); - - final org.openrdf.query.GraphQuery query = - cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - - final GraphQueryResult stmts = query.evaluate(); - - return new EdgeIterable(stmts); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Features getFeatures() { - - return FEATURES; - - } - - @Override - public Vertex getVertex(final Object key) { - - if (key == null) - throw new IllegalArgumentException(); - - final URI uri = factory.toVertexURI(key.toString()); - try { - if (cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { - return new BigdataVertex(uri, this); - } - return null; - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Iterable<Vertex> getVertices() { - - try { - final RepositoryResult<Statement> result = - cxn().getStatements(null, RDF.TYPE, VERTEX, false); - return new VertexIterable(result, true); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Iterable<Vertex> getVertices(String prop, Object val) { - - final URI p = factory.toPropertyURI(prop); - final Literal o = factory.toLiteral(val); - try { - final RepositoryResult<Statement> result = - cxn().getStatements(null, p, o, false); - return new VertexIterable(result, true); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public GraphQuery query() { - return new DefaultGraphQuery(this); - } - - @Override - public void removeEdge(final Edge edge) { - try { - final URI uri = factory.toURI(edge); - if (!cxn().hasStatement(uri, RDF.TYPE, EDGE, false)) { - throw new IllegalStateException(); - } - final URI wild = null; - // remove the edge statement - cxn().remove(wild, uri, wild); - // remove its properties - cxn().remove(uri, wild, wild); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - public void removeVertex(final Vertex vertex) { - try { - final URI uri = factory.toURI(vertex); - if (!cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { - throw new IllegalStateException(); - } - final URI wild = null; - // remove outgoing links and properties - cxn().remove(uri, wild, wild); - // remove incoming links - cxn().remove(wild, wild, uri); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - -// @Override -// public void commit() { -// try { -// cxn().commit(); -// } catch (RepositoryException e) { -// throw new RuntimeException(e); -// } -// } -// -// @Override -// public void rollback() { -// try { -// cxn().rollback(); -// cxn.close(); -// cxn = repo.getUnisolatedConnection(); -// cxn.setAutoCommit(false); -// } catch (RepositoryException e) { -// throw new RuntimeException(e); -// } -// } -// -// @Override -// public void shutdown() { -// try { -// cxn.close(); -// repo.shutDown(); -// } catch (RepositoryException e) { -// throw new RuntimeException(e); -// } -// } -// -// @Override -// @Deprecated -// public void stopTransaction(Conclusion arg0) { -// } - - public class VertexIterable implements Iterable<Vertex>, Iterator<Vertex> { - - private final CloseableIteration<Statement, ? extends OpenRDFException> stmts; - - private final boolean subject; - - private final List<Vertex> cache; - - public VertexIterable( - final CloseableIteration<Statement, ? extends OpenRDFException> stmts, - final boolean subject) { - this.stmts = stmts; - this.subject = subject; - this.cache = new LinkedList<Vertex>(); - } - - @Override - public boolean hasNext() { - try { - return stmts.hasNext(); - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public Vertex next() { - try { - final Statement stmt = stmts.next(); - final URI v = (URI) - (subject ? stmt.getSubject() : stmt.getObject()); - if (!hasNext()) { - stmts.close(); - } - final Vertex vertex = new BigdataVertex(v, BigdataGraph.this); - cache.add(vertex); - return vertex; - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<Vertex> iterator() { - return hasNext() ? this : cache.iterator(); - } - - } - - public class EdgeIterable implements Iterable<Edge>, Iterator<Edge> { - - private final CloseableIteration<Statement, ? extends OpenRDFException> stmts; - - private final List<Edge> cache; - - public EdgeIterable( - final CloseableIteration<Statement, ? extends OpenRDFException> stmts) { - this.stmts = stmts; - this.cache = new LinkedList<Edge>(); - } - - @Override - public boolean hasNext() { - try { - return stmts.hasNext(); - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public Edge next() { - try { - final Statement stmt = stmts.next(); - if (!hasNext()) { - stmts.close(); - } - final Edge edge = new BigdataEdge(stmt, BigdataGraph.this); - cache.add(edge); - return edge; - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<Edge> iterator() { - return hasNext() ? this : cache.iterator(); - } - - } - - public class FusedIterable<T> implements Iterable<T>, Iterator<T> { - - private final Iterable<T>[] args; - - private transient int i = 0; - - private transient Iterator<T> curr; - - public FusedIterable(final Iterable<T>... args) { - this.args = args; - this.curr = args[0].iterator(); - } - - @Override - public boolean hasNext() { - if (curr.hasNext()) { - return true; - } - while (!curr.hasNext() && i < (args.length-1)) { - curr = args[++i].iterator(); - if (curr.hasNext()) { - return true; - } - } - return false; - } - - @Override - public T next() { - return curr.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<T> iterator() { - return this; - } - - } - - protected static final Features FEATURES = new Features(); - - static { - - FEATURES.supportsSerializableObjectProperty = false; - FEATURES.supportsBooleanProperty = true; - FEATURES.supportsDoubleProperty = true; - FEATURES.supportsFloatProperty = true; - FEATURES.supportsIntegerProperty = true; - FEATURES.supportsPrimitiveArrayProperty = false; - FEATURES.supportsUniformListProperty = false; - FEATURES.supportsMixedListProperty = false; - FEATURES.supportsLongProperty = true; - FEATURES.supportsMapProperty = false; - FEATURES.supportsStringProperty = true; - - FEATURES.supportsDuplicateEdges = true; - FEATURES.supportsSelfLoops = true; - FEATURES.isPersistent = true; - FEATURES.isWrapper = false; - FEATURES.supportsVertexIteration = true; - FEATURES.supportsEdgeIteration = true; - FEATURES.supportsVertexIndex = false; - FEATURES.supportsEdgeIndex = false; - FEATURES.ignoresSuppliedIds = true; - FEATURES.supportsTransactions = false; - FEATURES.supportsIndices = true; - FEATURES.supportsKeyIndices = true; - FEATURES.supportsVertexKeyIndex = true; - FEATURES.supportsEdgeKeyIndex = true; - FEATURES.supportsEdgeRetrieval = true; - FEATURES.supportsVertexProperties = true; - FEATURES.supportsEdgeProperties = true; - FEATURES.supportsThreadedTransactions = false; - } - -} Added: branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-13 19:32:10 UTC (rev 8298) @@ -0,0 +1,1017 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import info.aduna.iteration.CloseableIteration; + +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import org.openrdf.OpenRDFException; +import org.openrdf.model.Literal; +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.impl.StatementImpl; +import org.openrdf.model.impl.URIImpl; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.GraphQueryResult; +import org.openrdf.query.QueryLanguage; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.RepositoryResult; + +import com.bigdata.rdf.store.BD; +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Features; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.GraphQuery; +import com.tinkerpop.blueprints.Vertex; +import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; + +/** + * A base class for a Blueprints wrapper around a bigdata back-end. + * + * @author mikepersonick + * + */ +public abstract class BigdataGraph implements Graph { + + /** + * URI used to represent a Vertex. + */ + public static final URI VERTEX = new URIImpl(BD.NAMESPACE + "Vertex"); + + /** + * URI used to represent a Edge. + */ + public static final URI EDGE = new URIImpl(BD.NAMESPACE + "Edge"); + + /** + * Factory for round-tripping between Blueprints data and RDF data. + */ + final BlueprintsRDFFactory factory; + + public BigdataGraph(final BlueprintsRDFFactory factory) { + + this.factory = factory; + + } + + /** + * For some reason this is part of the specification (i.e. part of the + * Blueprints test suite). + */ + public String toString() { + + return getClass().getSimpleName().toLowerCase(); + + } + + /** + * Different implementations will return different types of connections + * depending on the mode (client/server, embedded, read-only, etc.) + */ + protected abstract RepositoryConnection cxn() throws Exception; + + /** + * Return a single-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object getProperty(final URI uri, final String prop) { + + return getProperty(uri, factory.toPropertyURI(prop)); + + } + + /** + * Return a single-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object getProperty(final URI uri, final URI prop) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(uri, prop, null, false); + + if (result.hasNext()) { + + final Value value = result.next().getObject(); + + if (result.hasNext()) { + throw new RuntimeException(uri + + ": more than one value for p: " + prop + + ", did you mean to call getProperties()?"); + } + + if (!(value instanceof Literal)) { + throw new RuntimeException("not a property: " + value); + } + + final Literal lit = (Literal) value; + + return factory.fromLiteral(lit); + + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Return a multi-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public List<Object> getProperties(final URI uri, final String prop) { + + return getProperties(uri, factory.toPropertyURI(prop)); + + } + + + /** + * Return a multi-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public List<Object> getProperties(final URI uri, final URI prop) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(uri, prop, null, false); + + final List<Object> props = new LinkedList<Object>(); + + while (result.hasNext()) { + + final Value value = result.next().getObject(); + + if (!(value instanceof Literal)) { + throw new RuntimeException("not a property: " + value); + } + + final Literal lit = (Literal) value; + + props.add(factory.fromLiteral(lit)); + + } + + return props; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Return the property names for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Set<String> getPropertyKeys(final URI uri) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(uri, null, null, false); + + final Set<String> properties = new LinkedHashSet<String>(); + + while (result.hasNext()) { + + final Statement stmt = result.next(); + + if (!(stmt.getObject() instanceof Literal)) { + continue; + } + + if (stmt.getPredicate().equals(RDFS.LABEL)) { + continue; + } + + final String p = + factory.fromPropertyURI(stmt.getPredicate()); + + properties.add(p); + + } + + return properties; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Remove all values for a particular property on an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object removeProperty(final URI uri, final String prop) { + + return removeProperty(uri, factory.toPropertyURI(prop)); + + } + + /** + * Remove all values for a particular property on an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object removeProperty(final URI uri, final URI prop) { + + try { + + final Object oldVal = getProperty(uri, prop); + + cxn().remove(uri, prop, null); + + return oldVal; + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Set a single-value property on an edge or vertex (remove the old + * value first). + * + * @see {@link BigdataElement} + */ + public void setProperty(final URI uri, final String prop, final Object val) { + + setProperty(uri, factory.toPropertyURI(prop), factory.toLiteral(val)); + + } + + /** + * Set a single-value property on an edge or vertex (remove the old + * value first). + * + * @see {@link BigdataElement} + */ + public void setProperty(final URI uri, final URI prop, final Literal val) { + + try { + + cxn().remove(uri, prop, null); + + cxn().add(uri, prop, val); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Add a property on an edge or vertex (multi-value property extension). + * + * @see {@link BigdataElement} + */ + public void addProperty(final URI uri, final String prop, final Object val) { + + setProperty(uri, factory.toPropertyURI(prop), factory.toLiteral(val)); + + } + + /** + * Add a property on an edge or vertex (multi-value property extension). + * + * @see {@link BigdataElement} + */ + public void addProperty(final URI uri, final URI prop, final Literal val) { + + try { + + cxn().add(uri, prop, val); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Post a GraphML file to the remote server. (Bulk-upload operation.) + */ + public void loadGraphML(final String file) throws Exception { + + GraphMLReader.inputGraph(this, file); + + } + + /** + * Add an edge. + */ + @Override + public Edge addEdge(final Object key, final Vertex from, final Vertex to, + final String label) { + + if (label == null) { + throw new IllegalArgumentException(); + } + + final String eid = key != null ? key.toString() : UUID.randomUUID().toString(); + + final URI edgeURI = factory.toEdgeURI(eid); + + if (key != null) { + + final Edge edge = getEdge(key); + + if (edge != null) { + if (!(edge.getVertex(Direction.OUT).equals(from) && + (edge.getVertex(Direction.OUT).equals(to)))) { + throw new IllegalArgumentException("edge already exists: " + key); + } + } + + } + + try { + + // do we need to check this? +// if (cxn().hasStatement(edgeURI, RDF.TYPE, EDGE, false)) { +// throw new IllegalArgumentException("edge " + eid + " already exists"); +// } + + final URI fromURI = factory.toVertexURI(from.getId().toString()); + final URI toURI = factory.toVertexURI(to.getId().toString()); + + cxn().add(fromURI, edgeURI, toURI); + cxn().add(edgeURI, RDF.TYPE, EDGE); + cxn().add(edgeURI, RDFS.LABEL, factory.toLiteral(label)); + + return new BigdataEdge(new StatementImpl(fromURI, edgeURI, toURI), this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Add a vertex. + */ + @Override + public Vertex addVertex(final Object key) { + + try { + + final String vid = key != null ? + key.toString() : UUID.randomUUID().toString(); + + final URI uri = factory.toVertexURI(vid); + + // do we need to check this? +// if (cxn().hasStatement(vertexURI, RDF.TYPE, VERTEX, false)) { +// throw new IllegalArgumentException("vertex " + vid + " already exists"); +// } + + cxn().add(uri, RDF.TYPE, VERTEX); + + return new BigdataVertex(uri, this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Lookup an edge. + */ + @Override + public Edge getEdge(final Object key) { + + if (key == null) + throw new IllegalArgumentException(); + + try { + + final URI edge = factory.toEdgeURI(key.toString()); + + final RepositoryResult<Statement> result = + cxn().getStatements(null, edge, null, false); + + if (result.hasNext()) { + + final Statement stmt = result.next(); + + if (result.hasNext()) { + throw new RuntimeException( + "duplicate edge: " + key); + } + + return new BigdataEdge(stmt, this); + + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Iterate all edges. + */ + @Override + public Iterable<Edge> getEdges() { + + final URI wild = null; + return getEdges(wild, wild); + + } + + /** + * Find edges based on the from and to vertices and the edge labels, all + * optional parameters (can be null). The edge labels can be null to include + * all labels. + * <p> + * + * @param from + * the from vertex (null for wildcard) + * @param to + * the to vertex (null for wildcard) + * @param labels + * the edge labels to consider (optional) + * @return the edges matching the supplied criteria + */ + Iterable<Edge> getEdges(final URI from, final URI to, final String... labels) { + + final GraphQueryResult stmts = getElements(from, to, labels); + + return new EdgeIterable(stmts); + + } + + /** + * Translates the request to a high-performance SPARQL query: + * + * construct { + * ?from ?edge ?to . + * } where { + * ?edge rdf:type <Edge> . + * + * ?from ?edge ?to . + * + * # filter by edge label + * ?edge rdfs:label ?label . + * filter(?label in ("label1", "label2", ...)) . + * } + */ + protected GraphQueryResult getElements(final URI from, final URI to, + final String... labels) { + + final StringBuilder sb = new StringBuilder(); + sb.append("construct { ?from ?edge ?to . } where {\n"); + sb.append(" ?edge rdf:type bd:Edge .\n"); + sb.append(" ?from ?edge ?to .\n"); + if (labels != null && labels.length > 0) { + if (labels.length == 1) { + sb.append(" ?edge rdfs:label \"").append(labels[0]).append("\" .\n"); + } else { + sb.append(" ?edge rdfs:label ?label .\n"); + sb.append(" filter(?label in ("); + for (String label : labels) { + sb.append("\""+label+"\", "); + } + sb.setLength(sb.length()-2); + sb.append(")) .\n"); + } + } + sb.append("}"); + + // bind the from and/or to + final String queryStr = sb.toString() + .replace("?from", from != null ? "<"+from+">" : "?from") + .replace("?to", to != null ? "<"+to+">" : "?to"); + + try { + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return stmts; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find edges based on a SPARQL construct query. The query MUST construct + * edge statements: + * <p> + * construct { ?from ?edge ?to } where { ... } + * + * @see {@link BigdataGraphQuery} + */ + Iterable<Edge> getEdges(final String queryStr) { + + try { + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new EdgeIterable(stmts); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find vertices based on the supplied from and to vertices and the edge + * labels. One or the other (from and to) must be null (wildcard), but not + * both. Use getEdges() for wildcards on both the from and to. The edge + * labels can be null to include all labels. + * + * @param from + * the from vertex (null for wildcard) + * @param to + * the to vertex (null for wildcard) + * @param labels + * the edge labels to consider (optional) + * @return + * the vertices matching the supplied criteria + */ + Iterable<Vertex> getVertices(final URI from, final URI to, + final String... labels) { + + if (from != null && to != null) { + throw new IllegalArgumentException(); + } + + if (from == null && to == null) { + throw new IllegalArgumentException(); + } + + final GraphQueryResult stmts = getElements(from, to, labels); + + return new VertexIterable(stmts, from == null); + + } + + /** + * Find vertices based on a SPARQL construct query. If the subject parameter + * is true, the vertices will be taken from the subject position of the + * constructed statements, otherwise they will be taken from the object + * position. + * + * @see {@link BigdataGraphQuery} + */ + Iterable<Vertex> getVertices(final String queryStr, final boolean subject) { + + try { + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new VertexIterable(stmts, subject); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find edges with the supplied property value. + * + * construct { + * ?from ?edge ?to . + * } + * where { + * ?edge <prop> <val> . + * ?from ?edge ?to . + * } + */ + @Override + public Iterable<Edge> getEdges(final String prop, final Object val) { + + final URI p = factory.toPropertyURI(prop); + final Literal o = factory.toLiteral(val); + + try { + + final StringBuilder sb = new StringBuilder(); + sb.append("construct { ?from ?edge ?to . } where {\n"); + sb.append(" ?edge <"+p+"> "+o+" .\n"); + sb.append(" ?from ?edge ?to .\n"); + sb.append("}"); + + final String queryStr = sb.toString(); + + return getEdges(queryStr); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Lookup a vertex. + */ + @Override + public Vertex getVertex(final Object key) { + + if (key == null) + throw new IllegalArgumentException(); + + final URI uri = factory.toVertexURI(key.toString()); + + try { + + if (cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { + return new BigdataVertex(uri, this); + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + + /** + * Iterate all vertices. + */ + @Override + public Iterable<Vertex> getVertices() { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(null, RDF.TYPE, VERTEX, false); + + return new VertexIterable(result, true); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find vertices with the supplied property value. + */ + @Override + public Iterable<Vertex> getVertices(final String prop, final Object val) { + + final URI p = factory.toPropertyURI(prop); + final Literal o = factory.toLiteral(val); + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(null, p, o, false); + + return new VertexIterable(result, true); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Providing an override implementation for our GraphQuery to avoid the + * low-performance scan and filter paradigm. See {@link BigdataGraphQuery}. + */ + @Override + public GraphQuery query() { +// return new DefaultGraphQuery(this); + return new BigdataGraphQuery(this); + } + + /** + * Remove an edge and its properties. + */ + @Override + public void removeEdge(final Edge edge) { + + try { + + final URI uri = factory.toURI(edge); + + if (!cxn().hasStatement(uri, RDF.TYPE, EDGE, false)) { + throw new IllegalStateException(); + } + + final URI wild = null; + + // remove the edge statement + cxn().remove(wild, uri, wild); + + // remove its properties + cxn().remove(uri, wild, wild); + + } catch (Exception e) { + throw new RuntimeException(e); + ... [truncated message content] |
From: <dme...@us...> - 2014-05-13 18:15:29
|
Revision: 8297 http://sourceforge.net/p/bigdata/code/8297 Author: dmekonnen Date: 2014-05-13 18:15:26 +0000 (Tue, 13 May 2014) Log Message: ----------- documentation updates. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 16:57:51 UTC (rev 8296) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 18:15:26 UTC (rev 8297) @@ -1,114 +1,311 @@ -systap-bigdata Cookbook -====================== -This cookbook provides [http://www.bigdata.com/bigdata/blog/](bigdata v1.3.0) under Tomcat 7 (latest), with Oracle JDK 7 (latest) within an Ubuntu 12.0.4 VM. +Bigdata Cookbook +================ +The Bigdata cookbook provides the [bigdata v1.3.0](http://www.bigdata.com/bigdata/blog/) opensource triplestore/graph database. The cookbook provides recipes to install the Bigdata server as a web application under Tomcat, with its own embedded Jetty server (NSS - the NanoSparqlServer). The recipes will install pre-configured packages by default and optionally may build and install the server directly from source archive. -Typical synopsis: +For more info on Bigdata please visit: - % vagrant up - - The bigdata service is then available at: http://33.33.33.10:8080/bigdata/ +* Bigdata Homepage: [http://www.bigdata.com/bigdata/blog/](http://www.bigdata.com/bigdata/blog/) +* Bigdata SourceForge Page: [http://sourceforge.net/projects/bigdata/](http://sourceforge.net/projects/bigdata/) - Requirements ------------ +Chef 11 or higher<br/> +Ruby 1.9 (preferably from the Chef full-stack installer) -#### packages -In a stand alone context, this cookbook assumes the following resources have been installed: -* `VirtualBox` - Virtual machine provider [http://virtualbox.org/](http://virtualbox.org/) -* `Vagrant` - Environment assembler [http://vagrantup.com/](http://vagrantup.com/) -* `Berkshelf` - The Berkshelf cookbook manager [http://berkshelf.com/](http://berkshelf.com/). +Attributes +---------- -#### cookbook dependencies -Chef 10.14.2 or higher - has not been tested with previous versions. +### General Attributes +<table> + <tr> + <th>Attribute</th> + <th>Description</th> + <th>Default</th> + </tr> + <tr> + <td><tt>default['bigdata'][:home]</tt></td> + <td>Where the extracted bigdata.war contents are found</td> + <td>Default: <tt>/var/lib/bigdata</tt></td> + </tr> + <tr> + <td><tt>default['bigdata'][:url]</tt></td> + <td>Where to download the bigdata package file from</td> + <td>Tomcat: http://sourceforge.net/projects/bigdata/files/bigdata/1.3.0/bigdata.war/download<br/>NSS: http://bigdata.com/deploy/bigdata-1.3.0.tgz</td> + </tr> + <tr> + <td><tt>default['bigdata'][:data_dir]</tt></td> + <td>Where the bigdata.jni should reside. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">"Common Startup Problmems</a></td> + <td>default['bigdata'][:home]/data<br/>default['bigdata'][:home]/var/data</td> + </tr> + <tr> + <td><tt>default['bigdata'][:log_dir]</tt></td> + <td>Where bigdata log files should reside (i.e. queryLog.csv, rules.log, queryRunStateLog.csv).</td> + <td>Tomcat: default['bigdata'][:home]/var/log<br/>NSS: default['bigdata'][:home]/var/log</td> + </tr> + <tr> + <td><tt>default['bigdata'][:properties]</tt></td> + <td></td> + <td>default['bigdata'][:home]/RWStore.properties</td> + </tr> + <tr> + <td><tt>default['bigdata'][:svn_branch]</tt></td> + <td></td> + <td>https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1</td> + </tr> + <tr> + <td><tt>default['bigdata'][:source]</tt></td> + <td>The directory to retrieve Subversion contents into</td> + <td>bigdata-code</td> + </tr> + <tr> + <td><tt>default['bigdata']['journal.AbstractJournal.bufferMode']</tt></td> + <td></td> + <td>DiskRW</td> + </tr> + <tr> + <td><tt>default['bigdata']['service.AbstractTransactionService.minReleaseAge']</tt></td> + <td></td> + <td>1</td> + </tr> + <tr> + <td><tt>default['bigdata']['btree.writeRetentionQueue.capacity']</tt></td> + <td></td> + <td>4000</td> + </tr> + <tr> + <td><tt>default['bigdata']['btree.BTree.branchingFactor']</tt></td> + <td></td> + <td>128</td> + </tr> + <tr> + <td><tt>default['bigdata']['journal.AbstractJournal.initialExtent']</tt></td> + <td></td> + <td>209715200</td> + </tr> + <tr> + <td><tt>default['bigdata']['journal.AbstractJournal.maximumExtent']</tt></td> + <td></td> + <td>209715200</td> + </tr> + <tr> + <td><tt>default['bigdata']['rdf.sail.truthMaintenance']</tt></td> + <td></td> + <td>false</td> + </tr> + <tr> + <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.quads']</tt></td> + <td></td> + <td>false</td> + </tr> + <tr> + <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers']</tt></td> + <td></td> + <td>false</td> + </tr> + <tr> + <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.textIndex']</tt></td> + <td></td> + <td>false</td> + </tr> + <tr> + <td><tt>default['bigdata']['rdf.store.AbstractTripleStore.axiomsClass']</tt></td> + <td></td> + <td>com.bigdata.rdf.axioms.NoAxioms</td> + </tr> + <tr> + <td><tt>default['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor']</tt></td> + <td></td> + <td>400</td> + </tr> + <tr> + <td><tt>default['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor']</tt></td> + <td></td> + <td>1024</td> + </tr> + <tr> + <td><tt>default['bigdata']['rdf.sail.bufferCapacity']</tt></td> + <td>The number of statements to buffer before committing to the persistence layer.</td> + <td>100000</td> + </tr> + <tr> + <td><tt>default['mapgraph'][:source]</tt></td> + <td></td> + <td>mapgraph-code</td> + </tr> + <tr> + <td><tt>default['mapgraph'][:svn_branch]</tt></td> + <td></td> + <td>https://svn.code.sf.net/p/mpgraph/code/trunk</td> + </tr> +</table> -The following Opscode cookbooks are dependencies (automatically retrieved by `Berkshelf`): +### Attributes for Tomcat Based Install -* apt -* java -* tomcat +<table> + <tr> + <th>Attribute</th> + <th>Description</th> + <th>Default</th> + </tr> + <tr> + <td><tt>default['bigdata'][:web_home]</tt></td> + <td></td> + <td>default['tomcat'][:webapp_dir]/bigdata</td> + </tr> + <tr> + <td><tt>default['bigdata'][:log4j_properties]</tt></td> + <td></td> + <td>Tomcat:default['bigdata'][:web_home]/WEB-INF/classes/log4j.properties</td> + </tr> +</table> +### Attributes for NanoSparqlServer (NSS) Based Install - -Attributes ----------- - - -#### systap-bigdata::default <table> <tr> - <th>Key</th> - <th>Type</th> + <th>Attribute</th> <th>Description</th> <th>Default</th> </tr> <tr> - <td><tt>url</tt></td> - <td>String</td> - <td>where to download the bigdata.war file form</td> - <td><tt>http://sourceforge.net/projects/bigdata/ - files/bigdata/1.3.0/bigdata.war/download</tt></td> + <td><tt>default['bigdata'][:user]</tt></td> + <td></td> + <td>bigdata</td> </tr> <tr> - <td><tt>home</tt></td> - <td>String</td> - <td>where the extracted bigdata.war contents are found</td> - <td>Default: <tt>/var/lib/tomcat7/webapps/bigdata</tt></td> + <td><tt>default['bigdata'][:group]</tt></td> + <td></td> + <td>bigdata</td> </tr> <tr> - <td><tt>etc</tt></td> - <td>String</td> - <td>Where under the tomcat root the log files and the bigdata.jni should reside. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">"Common Startup Problmems</a></td> - <td>Default: <tt>/var/lib/tomcat7/webapps/bigdata/etc</tt></td> + <td><tt>default['bigdata'][:jetty_dir]</tt></td> + <td></td> + <td>default['bigdata'][:home]/var/jetty</td> </tr> </table> +### Attributes for MapGraph +<table> + <tr> + <th>Attribute</th> + <th>Description</th> + <th>Default</th> + </tr> + <tr> + <td><tt>default['mapgraph'][:source]</tt></td> + <td></td> + <td>mapgraph-code</td> + </tr> + <tr> + <td><tt>default['mapgraph'][:svn_branch]</tt></td> + <td></td> + <td>https://svn.code.sf.net/p/mpgraph/code/trunk</td> + </tr> +</table> + + +Recipes +------- + +A default recipe is not provided by the Bigdata cookbook. The user is given the option to install the Bigdata server under Tomcat or as a Jetty application. Under both options, Bigdata may optinally be built directly from the a Subversion source code branch. + +### tomcat + +Installs the [Tomcat](http://tomcat.apache.org/) server and then bigdata as a web application. Bigdata will be configured according to the attributes. If no attributes are given, Bigdata will be installed with the systems defaults. + +If the `build_from_svn` attribute is set to `true` Bigdata will be build from the Subversion repository given in the `svn_branch` attribute. + +### nss + +Installs the Bigdata server to run in the [NanoSparqlServer](http://wiki.bigdata.com/wiki/index.php/NanoSparqlServer) (Jetty) mode. + + +If the `build_from_svn` attribute is set to `true` Bigdata will be build from the Subversion repository given in the `svn_branch` attribute. + + +### mapgraph + +Retrieves the [MapGraph](http://sourceforge.net/projects/mpgraph/) project from its Subversion archive at SourceForget and builds it. +This recipe can only be used with GPU architecture and has only been validated against Amazon's "NVIDIA GRID GPU Driver" AMI. + + Usage ----- -### Stand Alone Context -To bring the VM up the first time, or any future time after a `halt`, invoke from the cookbook directory: - % vagrant up -The cookbbok will retrieve the Ubuntu 12.04 VM, Oracle's JDK 7, Apahce's Tomcat 7 and the Bigdata WAR file. These downloads may take a significant amount of time to complete. Should a download be interupted or some other error occur, continue with: - - % vagrant provision - -Once complete, the bigdata server will be available under: - [http://33.33.33.10:8080/bigdata/](http://33.33.33.10:8080/bigdata/) +### Vagrant Context -To halt the VM: +Sample Vagrant configurations are available in the Bigdata Subversion source tree under [bigdata/src/resources/deployment/vagrant](http://sourceforge.net/p/bigdata/code/HEAD/tree/branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/). - % vagrant halt +#### Tomcat Example -To delete the VM and from VirtualBox: +``` + chef.json = { + :bigdata => { + :install_type => "tomcat", + :build_from_svn => true + }, + :java => { + "install_flavor" => "oracle", + :jdk_version => "7", + :oracle => { 'accept_oracle_download_terms' => true } + }, + :tomcat => { + :base_version => "7" + } + } - % vagrant destroy - -To login into the VM: + chef.run_list = [ + ... + "recipe[bigdata::tomcat]" + ... + ] - % vagrant ssh +``` -### Cookbook Context +#### NSS Example -To use as a reciple in new cookbook, just include `systap-bigdata` in your node's `run_list` in the standard way: - ``` - "run_list": [ - "recipe[systap-bigdata::default]", - ... - ] + chef.json = { + :bigdata => { + :install_type => "nss" + }, + :java => { + "install_flavor" => "oracle", + :jdk_version => "7", + :oracle => { 'accept_oracle_download_terms' => true } + } + } + chef.run_list = [ + ... + "recipe[bigdata::nss]" + ... + ] + + ``` +### Trouble Shooting + +The Bigdta cookbook recipes have been tested thoroughly in the Vagrant context with VirtualBox and AWS providers using Ubuntu 12.04 and Oracle's JDK 7. + +When errors occur in the Vagrant context, it is most typically during the installation process where a network timeout has occurred during the retrieval of a dependent resource. simply continue with: + + % vagrant provision + +Which should get past any intermit ant network issues. For assistance with installation and other issues, please visit the [Bigdata Support Forum](http://sourceforge.net/p/bigdata/discussion/676946). + + License and Authors ------------------- Author:: Daniel Mekonnen [daniel<no-spam-at>systap.com] ``` -This pakcage may be resiributed under the same terms and conditions as the Bigdata project that it is a part of. +GNU GPLv2 - This pakcage may be resiributed under the same terms and conditions as the Bigdata project that it is a part of. ``` Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb 2014-05-13 16:57:51 UTC (rev 8296) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb 2014-05-13 18:15:26 UTC (rev 8297) @@ -32,7 +32,6 @@ default['bigdata'][:url] = "http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war" default['bigdata'][:web_home] = default['tomcat'][:webapp_dir] + "/bigdata" - default['bigdata'][:web_xml] = default['bigdata'][:web_home] + "/WEB-INF/web.xml" default['bigdata'][:log4j_properties] = default['bigdata'][:web_home] + "/WEB-INF/classes/log4j.properties" # Where the bigdata-ha.jnl file will live: This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-13 16:57:55
|
Revision: 8296 http://sourceforge.net/p/bigdata/code/8296 Author: thompsonbry Date: 2014-05-13 16:57:51 +0000 (Tue, 13 May 2014) Log Message: ----------- Changing log level for detailed host metrics => TRACE. This information is available in /bigdata/status and really clutters up the log. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java 2014-05-13 16:45:43 UTC (rev 8295) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java 2014-05-13 16:57:51 UTC (rev 8296) @@ -459,8 +459,8 @@ } - if (log.isInfoEnabled()) - log.info("hostMetricsMap=" + hostMetricsMap); + if (log.isTraceEnabled()) + log.trace("hostMetricsMap=" + hostMetricsMap); final HostTable newHostTable = normalizeHostScores(scoringRule, hostMetricsMap); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-13 16:45:46
|
Revision: 8295 http://sourceforge.net/p/bigdata/code/8295 Author: thompsonbry Date: 2014-05-13 16:45:43 +0000 (Tue, 13 May 2014) Log Message: ----------- host-based LBS bug fix. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java 2014-05-13 16:15:33 UTC (rev 8294) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java 2014-05-13 16:45:43 UTC (rev 8295) @@ -494,7 +494,6 @@ */ final int nhosts = hostMetricsMap.size(); - // int nhostsWithServices = 0; final String[] hostnames = new String[nhosts]; @@ -533,7 +532,7 @@ totalScore += hostScore; - // nhostsWithServices++; + i++; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <tho...@us...> - 2014-05-13 16:15:39
|
Revision: 8294 http://sourceforge.net/p/bigdata/code/8294 Author: thompsonbry Date: 2014-05-13 16:15:33 +0000 (Tue, 13 May 2014) Log Message: ----------- Commit includes fixes for #920 (content-negotation) and $624 (HA Load Balancer). I have run through the NSS, AST evaluation, and QUADS mode test suites and everything is green. The TestAll_LBS test suite is also green (HA). - CONNEG was broken in previous releases and would return available Content-Type corresponding to the least desired MIME Type as specified by the Accept header. See #920. Changes to ConnegScore, ConnegUtil, TestConneg. - RemoteRepository: A bug was identified where the openrdf binary RDF interchange type could not be used because a non-null charset would cause a Reader to be allocated rather than an InputStream within the BackgroundGraphResult. Historically, due to #920, this interchange type was not preferred and hence this code path was not tested. The fix was to use the default charset for the format associated with the Content-Type of the response unless overridden by an explicit charset in the encoding. - Added a new LBS policy (CountersLBSPolicy) based on the /bigdata/counters servlet. This policy is more chatty than the GangliaLBSPolicy, but it can be used in environments that do not support multicast and can be secured using standard techniques for httpd. The GangliaLBSPolicy was heavily refactored to create an abstract base class that is now shared by both the CountersLBSPolicy and the GangliaLBSPolicy. Added documentation to web.xml and the HALoadBalancer page of the wiki. See #624. - Release a new bigdata-ganglia.jar (v1.0.4). This release permits the Comparator to be null, which is useful since we want to order the hosts based on our IHostScoringRule rather than a simple ganglia metric comparison. - AbstractStatisticsCollection: Added @Override tags and FIXME on getCounters(). - CounterSet: private and final attributes. ignoring some unchecked conversions or raw types. @Override attributes. - ICounterSetSelector: expanded the interface slightly to allow optional filtering for HistoryInstruments (was implicit and manditory). This was necessary in order to support XML rendering of /bigdata/counters. - CounterSetFormat: Added to support CONNEG for the different kinds of counter set interchange (text/plain, text/html, application/xml). This was in support of the new CountersLBSPolicy. - IOStatCollector, VMStatCollector: Fixed some bugs in the OSX platforn metrics collectors, mostly around data races. - BigdataSailRemoteRepositoryConnection: added link to #914 (Set timeout on remote query). I have not worked on this ticket yet, but these comments mark the integration points. The other integration point is BigdataRDFContext.newQuery(), which is also linked to the ticket in this commit. - CountersServlet: modified to support CONNEG. - ConnegOptions: added toString(). clean up. - jetty.xml: refactored per guidence from webtide. - web.xml: comments on the CountersLBSPolicy. Modified Paths: -------------- branches/BIGDATA_RELEASE_1_3_0/.classpath branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/AbstractStatisticsCollector.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/CounterSet.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/IOStatCollector.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/VMStatCollector.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetBTreeSelector.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetSelector.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/ICounterSelector.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/URLQueryModel.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/TextRenderer.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/XHTMLRenderer.java branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/XMLRenderer.java branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/build.properties branches/BIGDATA_RELEASE_1_3_0/bigdata-ganglia/src/java/com/bigdata/ganglia/GangliaService.java branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/AbstractHA3JournalServerTestCase.java branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestAll_LBS.java branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/log4j-template-A.properties branches/BIGDATA_RELEASE_1_3_0/bigdata-rdf/src/java/com/bigdata/rdf/properties/PropertiesFormat.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/remote/BigdataSailRemoteRepositoryConnection.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BigdataRDFContext.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/ConnegScore.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/ConnegUtil.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/CountersServlet.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/HALoadBalancerServlet.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/client/ConnectOptions.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/client/RemoteRepository.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/client/RemoteRepositoryManager.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/HostScore.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/IHALoadBalancerPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/ServiceScore.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/NOPLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/RoundRobinLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/DefaultHostScoringRule.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/GangliaLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/LoadOneHostScoringRule.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestAll.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestAll2.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestConneg.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestNanoSparqlServerWithProxyIndexManager2.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/TestProtocolAll.java branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/WEB-INF/web.xml branches/BIGDATA_RELEASE_1_3_0/bigdata-war/src/jetty.xml branches/BIGDATA_RELEASE_1_3_0/build.properties branches/BIGDATA_RELEASE_1_3_0/pom.xml Added Paths: ----------- branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.4.jar branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/format/ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/format/CounterSetFormat.java branches/BIGDATA_RELEASE_1_3_0/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3LoadBalancer_CountersLBS.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/AbstractHostMetrics.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/HostTable.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/IHostMetrics.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/IHostScoringRule.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/NOPHostScoringRule.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/counters/ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/counters/CounterSetHostMetricsWrapper.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/counters/CountersLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/counters/DefaultHostScoringRule.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/GangliaHostMetricWrapper.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/lbs/ branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/lbs/TestAbstractHostLBSPolicy.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/lbs/TestAll.java Removed Paths: ------------- branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/HostTable.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/IHostScoringRule.java branches/BIGDATA_RELEASE_1_3_0/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/lbs/policy/ganglia/NOPHostScoringRule.java Modified: branches/BIGDATA_RELEASE_1_3_0/.classpath =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/.classpath 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/.classpath 2014-05-13 16:15:33 UTC (rev 8294) @@ -76,7 +76,7 @@ <classpathentry exported="true" kind="lib" path="bigdata-sails/lib/httpcomponents/commons-fileupload-1.2.2.jar"/> <classpathentry exported="true" kind="lib" path="bigdata-sails/lib/httpcomponents/commons-io-2.1.jar"/> <classpathentry exported="true" kind="lib" path="bigdata/lib/apache/log4j-1.2.17.jar"/> - <classpathentry exported="true" kind="lib" path="bigdata-rdf/lib/openrdf-sesame-2.6.10-onejar.jar"/> + <classpathentry exported="true" kind="lib" path="bigdata-rdf/lib/openrdf-sesame-2.6.10-onejar.jar" sourcepath="/Users/bryan/Documents/workspace/org.openrdf.sesame-2.6.10"/> <classpathentry exported="true" kind="lib" path="bigdata-rdf/lib/sesame-rio-testsuite-2.6.10.jar"/> <classpathentry exported="true" kind="lib" path="bigdata-sails/lib/sesame-sparql-testsuite-2.6.10.jar"/> <classpathentry exported="true" kind="lib" path="bigdata-sails/lib/sesame-store-testsuite-2.6.10.jar"/> Deleted: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.3.jar =================================================================== (Binary files differ) Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.4.jar =================================================================== (Binary files differ) Index: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.4.jar =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.4.jar 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.4.jar 2014-05-13 16:15:33 UTC (rev 8294) Property changes on: branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/bigdata-ganglia-1.0.4.jar ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +application/octet-stream \ No newline at end of property Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/AbstractStatisticsCollector.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/AbstractStatisticsCollector.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/AbstractStatisticsCollector.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -137,6 +137,7 @@ * The interval in seconds at which the counter values are read from the * host platform. */ + @Override public int getInterval() { return interval; @@ -225,8 +226,15 @@ * <p> * Note: Subclasses MUST extend this method to initialize their own * counters. + * + * TODO Why does this use the older <code>synchronized</code> pattern with a + * shared {@link #countersRoot} object rather than returning a new object + * per request? Check assumptions in the scale-out and local journal code + * bases for this. */ - synchronized public CounterSet getCounters() { + @Override + synchronized + public CounterSet getCounters() { if (countersRoot == null) { @@ -319,6 +327,7 @@ serviceRoot.addCounter(IProcessCounters.Memory_runtimeFreeMemory, new Instrument<Long>() { + @Override public void sample() { setValue(Runtime.getRuntime().freeMemory()); } @@ -326,6 +335,7 @@ serviceRoot.addCounter(IProcessCounters.Memory_runtimeTotalMemory, new Instrument<Long>() { + @Override public void sample() { setValue(Runtime.getRuntime().totalMemory()); } @@ -599,6 +609,7 @@ * Start collecting host performance data -- must be extended by the * concrete subclass. */ + @Override public void start() { if (log.isInfoEnabled()) @@ -612,6 +623,7 @@ * Stop collecting host performance data -- must be extended by the concrete * subclass. */ + @Override public void stop() { if (log.isInfoEnabled()) @@ -634,6 +646,7 @@ final Thread t = new Thread() { + @Override public void run() { AbstractStatisticsCollector.this.stop(); Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/CounterSet.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/CounterSet.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/CounterSet.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -87,7 +87,7 @@ */ public class CounterSet extends AbstractCounterSet implements ICounterSet { - static protected final Logger log = Logger.getLogger(CounterSet.class); + static private final Logger log = Logger.getLogger(CounterSet.class); // private String pathx; private final Map<String,ICounterNode> children = new ConcurrentHashMap<String,ICounterNode>(); @@ -107,7 +107,7 @@ * @param name * The name of the child. */ - private CounterSet(String name,CounterSet parent) { + private CounterSet(final String name, final CounterSet parent) { super(name,parent); @@ -159,6 +159,9 @@ // // } + /** + * Return <code>true</code> iff there are no children. + */ public boolean isLeaf() { return children.isEmpty(); @@ -216,7 +219,6 @@ } - @SuppressWarnings("unchecked") private void attach2(final ICounterNode src, final boolean replace) { if (src == null) @@ -286,7 +288,7 @@ } else { - ((Counter)src).parent = this; + ((Counter<?>)src).parent = this; } @@ -311,7 +313,8 @@ * @return The node -or- <code>null</code> if there is no node with that * path. */ - synchronized public ICounterNode detach(String path) { + @SuppressWarnings({ "rawtypes", "unchecked" }) + synchronized public ICounterNode detach(final String path) { final ICounterNode node = getPath(path); @@ -347,7 +350,7 @@ * @todo optimize for patterns that are anchored by filtering the child * {@link ICounterSet}. */ - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) public Iterator<ICounter> counterIterator(final Pattern filter) { final IStriterator src = new Striterator(directChildIterator( @@ -391,7 +394,7 @@ * * @return */ - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) public Iterator<ICounterNode> getNodes(final Pattern filter) { IStriterator src = ((IStriterator) postOrderIterator()) @@ -414,7 +417,8 @@ } - @SuppressWarnings("unchecked") + @Override + @SuppressWarnings({ "unchecked", "rawtypes" }) public Iterator<ICounter> getCounters(final Pattern filter) { IStriterator src = ((IStriterator) postOrderIterator()) @@ -450,8 +454,9 @@ * When <code>null</code> all directly attached children * (counters and counter sets) are visited. */ - public Iterator directChildIterator(boolean sorted, - Class<? extends ICounterNode> type) { + @SuppressWarnings("rawtypes") + public Iterator directChildIterator(final boolean sorted, + final Class<? extends ICounterNode> type) { /* * Note: In order to avoid concurrent modification problems under @@ -514,7 +519,7 @@ * child with a post-order traversal of its children and finally visits this * node itself. */ - @SuppressWarnings("unchecked") + @SuppressWarnings({ "rawtypes", "unchecked" }) public Iterator postOrderIterator() { /* @@ -531,6 +536,7 @@ * child with a pre-order traversal of its children and finally visits this * node itself. */ + @SuppressWarnings({ "rawtypes", "unchecked" }) public Iterator preOrderIterator() { /* @@ -562,7 +568,9 @@ /* * Expand each child in turn. */ - protected Iterator expand(Object childObj) { + @Override + @SuppressWarnings("rawtypes") + protected Iterator expand(final Object childObj) { /* * A child of this node. @@ -603,7 +611,9 @@ /* * Expand each child in turn. */ - protected Iterator expand(Object childObj) { + @Override + @SuppressWarnings("rawtypes") + protected Iterator expand(final Object childObj) { /* * A child of this node. @@ -624,7 +634,8 @@ } - public ICounterNode getChild(String name) { + @Override + public ICounterNode getChild(final String name) { if (name == null) throw new IllegalArgumentException(); @@ -642,6 +653,7 @@ * * @return The {@link CounterSet} described by the path. */ + @Override synchronized public CounterSet makePath(String path) { if (path == null) { @@ -740,6 +752,7 @@ * The object that is used to take the measurements from which * the counter's value will be determined. */ + @SuppressWarnings("rawtypes") synchronized public ICounter addCounter(final String path, final IInstrument instrument) { @@ -767,7 +780,7 @@ } - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) private ICounter addCounter2(final String name, final IInstrument instrument) { if (name == null) @@ -831,12 +844,14 @@ * * @throws IOException */ + @Override public void asXML(Writer w, Pattern filter) throws IOException { XMLUtility.INSTANCE.writeXML(this, w, filter); } + @Override public void readXML(final InputStream is, final IInstrumentFactory instrumentFactory, final Pattern filter) throws IOException, ParserConfigurationException, SAXException { Added: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/format/CounterSetFormat.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/format/CounterSetFormat.java (rev 0) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/format/CounterSetFormat.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -0,0 +1,214 @@ +/** + +Copyright (C) SYSTAP, LLC 2006-2012. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +/* +Portions of this code are: + +Copyright Aduna (http://www.aduna-software.com/) � 2001-2007 + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*/ +/* + * Created on Jul 25, 2012 + */ +package com.bigdata.counters.format; + +import info.aduna.lang.FileFormat; + +import java.nio.charset.Charset; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.concurrent.CopyOnWriteArraySet; + +import com.bigdata.counters.ICounterSet; + +/** + * Formats for {@link ICounterSet}s. + * + * @author <a href="mailto:tho...@us...">Bryan Thompson</a> + */ +public class CounterSetFormat extends FileFormat implements Iterable<CounterSetFormat> { + + /** + * All known/registered formats for this class. + */ + private static final CopyOnWriteArraySet<CounterSetFormat> formats = new CopyOnWriteArraySet<CounterSetFormat>(); + + /** + * A thread-safe iterator that will visit all known formats (declared by + * {@link Iterable}). + */ + @Override + public Iterator<CounterSetFormat> iterator() { + + return formats.iterator(); + + } + + /** + * Alternative static method signature. + */ + static public Iterator<CounterSetFormat> getFormats() { + + return formats.iterator(); + + } + + /** + * Text properties file using <code>text/plain</code> and + * <code>UTF-8</code>. + */ + public static final CounterSetFormat TEXT = new CounterSetFormat(// + "text/plain",// + Arrays.asList("text/plain"),// + Charset.forName("UTF-8"), // + Arrays.asList("counterSet")// + ); + + /** + * XML properties file using <code>application/xml</code> and + * <code>UTF-8</code>. + */ + public static final CounterSetFormat XML = new CounterSetFormat(// + "application/xml",// + Arrays.asList("application/xml"),// + Charset.forName("UTF-8"),// charset + Arrays.asList("xml")// known-file-extensions + ); + + /** + * XML properties file using <code>text/html</code> and <code>UTF-8</code>. + */ + public static final CounterSetFormat HTML = new CounterSetFormat(// + "text/html",// + Arrays.asList("text/html"),// + Charset.forName("UTF-8"),// charset + Arrays.asList("html")// known-file-extensions + ); + + /** + * Registers the specified format. + */ + public static void register(final CounterSetFormat format) { + + formats.add(format); + + } + + static { + + register(HTML); + register(TEXT); + register(XML); + + } + + /** + * Creates a new RDFFormat object. + * + * @param name + * The name of the RDF file format, e.g. "RDF/XML". + * @param mimeTypes + * The MIME types of the RDF file format, e.g. + * <tt>application/rdf+xml</tt> for the RDF/XML file format. + * The first item in the list is interpreted as the default + * MIME type for the format. + * @param charset + * The default character encoding of the RDF file format. + * Specify <tt>null</tt> if not applicable. + * @param fileExtensions + * The RDF format's file extensions, e.g. <tt>rdf</tt> for + * RDF/XML files. The first item in the list is interpreted + * as the default file extension for the format. + */ + public CounterSetFormat(final String name, + final Collection<String> mimeTypes, final Charset charset, + final Collection<String> fileExtensions) { + + super(name, mimeTypes, charset, fileExtensions); + + } + + /** + * Tries to determine the appropriate file format based on the a MIME type + * that describes the content type. + * + * @param mimeType + * A MIME type, e.g. "text/html". + * @return An {@link CounterSetFormat} object if the MIME type was + * recognized, or <tt>null</tt> otherwise. + * @see #forMIMEType(String,PropertiesFormat) + * @see #getMIMETypes() + */ + public static CounterSetFormat forMIMEType(final String mimeType) { + + return forMIMEType(mimeType, null); + + } + + /** + * Tries to determine the appropriate file format based on the a MIME type + * that describes the content type. The supplied fallback format will be + * returned when the MIME type was not recognized. + * + * @param mimeType + * A file name. + * @return An {@link CounterSetFormat} that matches the MIME type, or the + * fallback format if the extension was not recognized. + * @see #forMIMEType(String) + * @see #getMIMETypes() + */ + public static CounterSetFormat forMIMEType(String mimeType, + CounterSetFormat fallback) { + + return matchMIMEType(mimeType, formats/* Iterable<FileFormat> */, + fallback); + + } + +} \ No newline at end of file Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/IOStatCollector.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/IOStatCollector.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/IOStatCollector.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -28,11 +28,11 @@ package com.bigdata.counters.osx; -import java.util.HashMap; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; import com.bigdata.counters.AbstractProcessCollector; @@ -48,14 +48,13 @@ import com.bigdata.rawstore.Bytes; /** - * Collects some counters using <code>iostat</code>. Unfortunately, + * Collects some counters using <code>iostat</code> under OSX. Unfortunately, * <code>iostat</code> does not break down the reads and writes and does not * report IO Wait. This information is obviously available from OSX as it is * provided by the ActivityMonitor, but we can not get it from * <code>iostat</code>. * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> - * @version $Id: VMStatCollector.java 4289 2011-03-10 21:22:30Z thompsonbry $ */ public class IOStatCollector extends AbstractProcessCollector implements ICounterHierarchy, IRequiredHostCounters, IHostCounters{ @@ -77,7 +76,7 @@ } - public I(String path) { + public I(final String path) { assert path != null; @@ -85,9 +84,10 @@ } + @Override public long lastModified() { - return lastModified; + return lastModified.get(); } @@ -95,7 +95,8 @@ * @throws UnsupportedOperationException * always. */ - public void setValue(T value, long timestamp) { + @Override + public void setValue(final T value, final long timestamp) { throw new UnsupportedOperationException(); @@ -114,7 +115,7 @@ DI(final String path) { - this(path,1d); + this(path, 1d); } @@ -126,7 +127,7 @@ } - + @Override public Double getValue() { final Double value = (Double) vals.get(path); @@ -146,14 +147,14 @@ /** * Map containing the current values for the configured counters. The keys * are paths into the {@link CounterSet}. The values are the data most - * recently read from <code>vmstat</code>. + * recently read from <code>iostat</code>. */ - final private Map<String, Object> vals = new HashMap<String, Object>(); + final private Map<String, Object> vals = new ConcurrentHashMap<String, Object>(); /** * The timestamp associated with the most recently collected values. */ - private long lastModified = System.currentTimeMillis(); + private final AtomicLong lastModified = new AtomicLong(System.currentTimeMillis()); /** * The {@link Pattern} used to split apart the rows read from @@ -178,7 +179,8 @@ this.cpuStats = cpuStats; } - + + @Override public List<String> getCommand() { final List<String> command = new LinkedList<String>(); @@ -203,14 +205,13 @@ } - /** - * Declares the counters that we will collect - */ + @Override public CounterSet getCounters() { final CounterSet root = new CounterSet(); - inst = new LinkedList<I>(); + @SuppressWarnings("rawtypes") + final List<I> inst = new LinkedList<I>(); /* * Note: Counters are all declared as Double to facilitate aggregation. @@ -249,24 +250,22 @@ inst.add(new DI(IHostCounters.CPU_PercentUserTime, .01d)); // Note: column sy inst.add(new DI(IHostCounters.CPU_PercentSystemTime, .01d)); -// // Note: IO Wait is NOT reported by vmstat. +// // Note: IO Wait is NOT reported by iostat. // inst.add(new DI(IHostCounters.CPU_PercentIOWait, .01d)); } - for (Iterator<I> itr = inst.iterator(); itr.hasNext();) { + for (@SuppressWarnings("rawtypes") I i : inst) { - final I i = itr.next(); + root.addCounter(i.getPath(), i); - root.addCounter(i.getPath(), i); + } - } - return root; } - private List<I> inst = null; + @Override public AbstractProcessReader getProcessReader() { return new IOStatReader(); @@ -300,6 +299,7 @@ */ private class IOStatReader extends ProcessReaderHelper { + @Override protected ActiveProcess getActiveProcess() { if (activeProcess == null) @@ -427,7 +427,7 @@ try { // timestamp - lastModified = System.currentTimeMillis(); + lastModified.set(System.currentTimeMillis()); final String[] fields = pattern .split(data.trim(), 0/* limit */); Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/VMStatCollector.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/VMStatCollector.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/osx/VMStatCollector.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -28,11 +28,11 @@ package com.bigdata.counters.osx; -import java.util.HashMap; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; import java.util.regex.Pattern; import com.bigdata.counters.AbstractProcessCollector; @@ -72,17 +72,19 @@ } - public I(String path) { - - assert path != null; - + public I(final String path) { + + if (path == null) + throw new IllegalArgumentException(); + this.path = path; } + @Override public long lastModified() { - return lastModified; + return lastModified.get(); } @@ -90,6 +92,7 @@ * @throws UnsupportedOperationException * always. */ + @Override public void setValue(T value, long timestamp) { throw new UnsupportedOperationException(); @@ -108,20 +111,20 @@ protected final double scale; DI(final String path) { - - this(path,1d); + this(path, 1d); + } DI(final String path, final double scale) { - - super( path ); - + + super(path); + this.scale = scale; - + } - - + + @Override public Double getValue() { final Double value = (Double) vals.get(path); @@ -143,12 +146,13 @@ * are paths into the {@link CounterSet}. The values are the data most * recently read from <code>vmstat</code>. */ - final private Map<String, Object> vals = new HashMap<String, Object>(); - + private final Map<String, Object> vals = new ConcurrentHashMap<String, Object>(); + /** * The timestamp associated with the most recently collected values. */ - private long lastModified = System.currentTimeMillis(); + private final AtomicLong lastModified = new AtomicLong( + System.currentTimeMillis()); /** * The {@link Pattern} used to split apart the rows read from @@ -166,7 +170,8 @@ super(interval); } - + + @Override public List<String> getCommand() { final List<String> command = new LinkedList<String>(); @@ -180,14 +185,13 @@ } - /** - * Declares the counters that we will collect - */ + @Override public CounterSet getCounters() { final CounterSet root = new CounterSet(); - inst = new LinkedList<I>(); + @SuppressWarnings("rawtypes") + final List<I> inst = new LinkedList<I>(); /* * Note: Counters are all declared as Double to facilitate aggregation. @@ -209,19 +213,17 @@ */ inst.add(new DI(IHostCounters.Memory_Bytes_Free)); - for (Iterator<I> itr = inst.iterator(); itr.hasNext();) { + for (@SuppressWarnings("rawtypes") I i : inst) { - final I i = itr.next(); + root.addCounter(i.getPath(), i); - root.addCounter(i.getPath(), i); + } - } - - return root; + return root; } - private List<I> inst = null; + @Override public AbstractProcessReader getProcessReader() { return new VMStatReader(); @@ -249,6 +251,7 @@ */ private class VMStatReader extends ProcessReaderHelper { + @Override protected ActiveProcess getActiveProcess() { if (activeProcess == null) @@ -357,7 +360,7 @@ try { // timestamp - lastModified = System.currentTimeMillis(); + lastModified.set(System.currentTimeMillis()); final String[] fields = pattern.split(data.trim(), 0/* limit */); Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetBTreeSelector.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetBTreeSelector.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetBTreeSelector.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -32,8 +32,6 @@ import java.util.Vector; import java.util.regex.Pattern; -import org.apache.log4j.Logger; - import com.bigdata.counters.CounterSet; import com.bigdata.counters.ICounter; import com.bigdata.counters.PeriodEnum; @@ -43,11 +41,10 @@ * Reads the relevant performance counter data from the store. * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> - * @version $Id$ */ public class CounterSetBTreeSelector implements ICounterSelector { - protected static final Logger log = Logger.getLogger(CounterSetBTreeSelector.class); +// private static final Logger log = Logger.getLogger(CounterSetBTreeSelector.class); private final CounterSetBTree btree; @@ -65,9 +62,12 @@ } + @Override + @SuppressWarnings("rawtypes") public ICounter[] selectCounters(final int depth, final Pattern pattern, - final long fromTime, final long toTime, final PeriodEnum period) { - + final long fromTime, final long toTime, final PeriodEnum period, + final boolean historyRequiredIsIgnored) { + final CounterSet counterSet = btree.rangeIterator(fromTime, toTime, period.toTimeUnit(), pattern, depth); Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetSelector.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetSelector.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/CounterSetSelector.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -43,11 +43,10 @@ * Reads counters from a {@link CounterSet}. * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> - * @version $Id$ */ public class CounterSetSelector implements ICounterSelector { - protected static final Logger log = Logger.getLogger(CounterSetSelector.class); + private static final Logger log = Logger.getLogger(CounterSetSelector.class); private final CounterSet counterSet; @@ -70,10 +69,13 @@ * Note: logic was modified to no longer consider the relative depth, only * the absolute depth. * - * FIXME does not use [fromTime, toTime, or period]. + * FIXME does not use [fromTime, toTime, or period] (or model.path) */ + @Override + @SuppressWarnings("rawtypes") public ICounter[] selectCounters(final int depth, final Pattern pattern, - final long fromTime, final long toTime, final PeriodEnum period) { + final long fromTime, final long toTime, final PeriodEnum period, + final boolean historyRequired) { // // depth of the hierarchy at the point where we are starting. // final int ourDepth = counterSet.getDepth(); @@ -94,11 +96,15 @@ nscanned++; - if(!(c.getInstrument() instanceof HistoryInstrument)) { + if (log.isDebugEnabled()) + log.debug("considering: " + c.getPath()); + + if (historyRequired + && !(c.getInstrument() instanceof HistoryInstrument)) { // prune non-history counters. if (log.isDebugEnabled()) - log.debug("skipping: " + c.getPath()); + log.debug("skipping (history): " + c.getPath()); nskipped++; @@ -106,9 +112,6 @@ } - if (log.isDebugEnabled()) - log.debug("considering: " + c.getPath()); - if (depth != 0) { final int counterDepth = c.getDepth(); @@ -117,7 +120,7 @@ // prune by depth if (log.isDebugEnabled()) - log.debug("skipping: " + c.getPath()); + log.debug("skipping (depth): " + c.getPath()); nskipped++; Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/ICounterSelector.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/ICounterSelector.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/ICounterSelector.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -30,6 +30,7 @@ import java.util.regex.Pattern; +import com.bigdata.counters.HistoryInstrument; import com.bigdata.counters.ICounter; import com.bigdata.counters.PeriodEnum; @@ -37,7 +38,6 @@ * Interface for selecting counters. * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> - * @version $Id$ */ public interface ICounterSelector { @@ -57,10 +57,16 @@ * counter timestamps which will be selected. * @param period * The unit of aggregation for the selected performance counters. - * + * @param historyRequired + * When <code>true</code> the {@link ICounter} will be ignored + * unless it is associated with a {@link HistoryInstrument}. + * (This used to be the default behavior, but there are use cases + * where we do not need to have history.) + * * @return The selected performance counters. */ + @SuppressWarnings("rawtypes") ICounter[] selectCounters(int depth, Pattern pattern, long fromTime, - long toTime, PeriodEnum period); + long toTime, PeriodEnum period, boolean historyRequired); } Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/URLQueryModel.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/URLQueryModel.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/query/URLQueryModel.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -52,7 +52,6 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; - import org.apache.log4j.Logger; import com.bigdata.counters.History; @@ -343,6 +342,70 @@ */ final public File file; + @Override + public String toString() { + + final StringBuilder sb = new StringBuilder(); + + sb.append(URLQueryModel.class.getName()); + + sb.append("{uri=" + uri); + + sb.append(",params=" + params); + + sb.append(",path=" + path); + + sb.append(",depth=" + depth); + + sb.append(",reportType=" + reportType); + + sb.append(",mimeType=" + mimeType); + + sb.append(",pattern=" + pattern); + + sb.append(",category=" + + (category == null ? "N/A" : Arrays.toString(category))); + + sb.append(",period=" + period); + + sb.append(",[fromTime=" + fromTime); + + sb.append(",toTime=" + toTime + "]"); + + sb.append(",flot=" + flot); + + if (eventOrderBy != null) { + sb.append(",eventOrderBy=["); + boolean first = true; + for (Field f : eventOrderBy) { + if (!first) + sb.append(","); + sb.append(f.getName()); + first = false; + } + sb.append("]"); + } + + if (eventFilters != null && !eventFilters.isEmpty()) { + sb.append(",eventFilters{"); + boolean first = true; + for (Map.Entry<Field, Pattern> e : eventFilters.entrySet()) { + if (!first) + sb.append(","); + sb.append(e.getKey().getName()); + sb.append("="); + sb.append(e.getValue()); + first = false; + } + sb.append("}"); + } + + sb.append("}"); + + return sb.toString(); + + } + /** * Factory for {@link NanoHTTPD} integration. * @@ -396,7 +459,10 @@ * * @param service * The service object IFF one was specified when - * {@link CounterSetHTTPD} was started. + * {@link CounterSetHTTPD} was started. If this implements the + * {@link IEventReportingService} interface, then events can also + * be requested. + * * @param req * The request. * @param resp @@ -412,7 +478,7 @@ final LinkedHashMap<String, Vector<String>> params = new LinkedHashMap<String, Vector<String>>(); - @SuppressWarnings("unchecked") +// @SuppressWarnings("unchecked") final Enumeration<String> enames = req.getParameterNames(); while (enames.hasMoreElements()) { Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/TextRenderer.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/TextRenderer.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/TextRenderer.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -48,12 +48,12 @@ /** * Describes the state of the controller. */ - public final URLQueryModel model; + private final URLQueryModel model; /** * Selects the counters to be rendered. */ - final ICounterSelector counterSelector; + private final ICounterSelector counterSelector; /** * @param model @@ -77,7 +77,8 @@ } - public void render(Writer w) throws IOException { + @Override + public void render(final Writer w) throws IOException { final IRenderer renderer; @@ -85,8 +86,10 @@ case correlated: { - final ICounter[] counters = counterSelector.selectCounters(model.depth, - model.pattern, model.fromTime, model.toTime, model.period); + @SuppressWarnings("rawtypes") + final ICounter[] counters = counterSelector.selectCounters( + model.depth, model.pattern, model.fromTime, model.toTime, + model.period, true/* historyRequired */); final HistoryTable historyTable = new HistoryTable(counters, model.period); @@ -100,9 +103,11 @@ case pivot: { - final ICounter[] counters = counterSelector.selectCounters(model.depth, - model.pattern, model.fromTime, model.toTime, model.period); - + @SuppressWarnings("rawtypes") + final ICounter[] counters = counterSelector.selectCounters( + model.depth, model.pattern, model.fromTime, model.toTime, + model.period, true/* historyRequired */); + final HistoryTable historyTable = new HistoryTable(counters, model.period); Modified: branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/XHTMLRenderer.java =================================================================== --- branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/XHTMLRenderer.java 2014-05-13 12:41:52 UTC (rev 8293) +++ branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/counters/render/XHTMLRenderer.java 2014-05-13 16:15:33 UTC (rev 8294) @@ -48,11 +48,10 @@ * name. * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> - * @version $Id$ */ public class XHTMLRenderer implements IRenderer { - final static protected Logger log = Logger.getLogger(XHTMLRenderer.class); + final static private Logger log = Logger.getLogger(XHTMLRenderer.class); final public static String ps = ICounterSet.pathSeparator; @@ -68,12 +67,12 @@ /** * Describes the state of the controller. */ - public final URLQueryModel model; + private final URLQueryModel model; /** * Selects the counters to be rendered. */ - public final ICounterSelector counterSelector; + private final ICounterSelector counterSelector; /** * @param model @@ -101,6 +100,7 @@ * @param w * @throws IOException */ + @Override public void render(final Writer w) throws IOException { writeXmlDecl(w); @@ -117,7 +117,7 @@ } - protected void writeXmlDecl(Writer w) throws IOException { + protected void writeXmlDecl(final Writer w) throws IOException { w.write("<?xml version=\"1.0\" encoding=\"" + encoding + "\"?>\n"); @@ -128,7 +128,7 @@ * @param w * @throws IOException */ - protected void writeDocType(Writer w) throws IOException { + protected void writeDocType(final Writer w) throws IOException { // if(true) return; @@ -143,7 +143,7 @@ } /** The start <code>html</code> tag. */ - protected void writeHtml(Writer w) throws IOException { + protected void writeHtml(final Writer w) throws IOException { w.write("<html "); @@ -159,7 +159,7 @@ } - protected void writeHead(Writer w) throws IOException { + protected void writeHead(final Writer w) throws IOException { w.write("<head\n>"); @@ -170,13 +170,13 @@ w.write("</head\n>"); } - protected void writeTitle(Writer w) throws IOException { + protected void writeTitle(final Writer w) throws IOException { w.write("<title>bigdata(tm) telemetry : "+cdata(model.path)+"</title\n>"); } - protected void writeScripts(Writer w) throws IOException { + protected void writeScripts(final Writer w) throws IOException { if (model.flot) { @@ -195,7 +195,8 @@ protected void writeBody(final Writer w) throws IOException { w.write("<body\n>"); - + + // Navigate to the node selected by the path. final ICounterNode node = ((CounterSetSelector) counterSelector) .getRoot().getPath(model.path); @@ -221,7 +222,7 @@ if(node instanceof ICounter) { - writeCounter(w, (ICounter) node); + writeCounter(w, (ICounter<?>) node); } else { @@ -241,17 +242,17 @@ writeHistoryTable(w, counterSelector.selectCounters( model.depth, model.pattern, model.fromTime, - model.toTime, model.period), model.period, - model.timestampFormat); + model.toTime, model.period, true/* historyRequired */), + model.period, model.timestampFormat); break; case pivot: - writePivotTable(w, counterSelector.selectCounters( - model.depth, model.pattern, model.fromTime, - model.toTime, model.period)); - + writePivotTable(w, counterSelector.selectCounters(model.depth, + model.pattern, model.fromTime, model.toTime, + model.period, true/* historyRequired */)); + break; case events: @@ -280,7 +281,7 @@ * * @deprecated by refactor inside of a rendering object. */ - protected void writeFullPath(Writer w, String path) + protected void writeFullPath(final Writer w, final String path) throws IOException { writePath(w, path, 0/* root */); @@ -296,8 +297,8 @@ * * @deprecated by refactor inside of a rendering object. */ - protected void writePath(Writer w, String path, int rootDepth) - throws IOException { + protected void writePath(final Writer w, final String path, + final int rootDepth) throws IOException { final String[] a = path.split(ps); @@ -393,7 +394,7 @@ * {@link CounterSet} in a single table (this is the navigational view of * the counter set hierarchy). */ - protected void writeCounterSet(Writer w, final CounterSet counterSet, + protected void writeCounterSet(final Writer w, final CounterSet counterSet, final int depth) throws IOException { // depth of the hierarchy at the point where we are starting. @@ -480,7 +481,7 @@ } else { - final ICounter counter = (ICounter) node; + final ICounter<?> counter = (ICounter<?>) node; /* * write out values for the counter. @@ -502,7 +503,7 @@ * LBS. */ - HistoryInstrument inst = (HistoryInstrument) counter + final HistoryInstrument<?> inst = (HistoryInstrument<?>) counter .getInstrument(); w.write(" <td>" + value(counter,inst.minutes.getAverage()) @@ -553,7 +554,8 @@ * * @throws IOException */ - protected void writeCounter(final Writer w, final ICounter counter) + protected void writeCounter(final Writer w, + @SuppressWarnings("rawtypes") final ICounter counter) throws IOException { if (counter.getInstrument() instanceof HistoryInstrument) { @@ -746,17 +748,16 @@ /** * * @author <a href="mailto:tho...@us...">Bryan Thompson</a> - * @version $Id$ */ public class HTMLValueFormatter extends ValueFormatter { - protected final URLQueryModel model; + private final URLQueryModel model; /** * * @param model */ - public HTMLValueFormatter(URLQueryModel model) { + public HTMLValueFormatter(final URLQueryModel model) { super(model); @@ -769,16 +770,19 @@ * for inclusion in a CDATA section (we do both operations together so that * we can format {@link IServiceCounters#LOCAL_HTTPD} as a link anchor. */ - public String value(final ICounter counter, final Object val) { + public String value( + @SuppressWarnings("rawtypes") final ICounter counter, + final Object val) { - return XHTMLRenderer.this.value(counter,val); + return XHTMLRenderer.this.value(counter, val); } /** * A clickable trail of the path from the root. */ - public void writeFullPath(Writer w, String path) + @Override + public void writeFullPath(final Writer w, final String path) throws IOException { writePath(w, path, 0/* root */); @@ -792,8 +796,9 @@ * The path components will be shown beginning at this depth - * ZERO (0) is the root. */ - public void writePath(Writer w, String path, int rootDepth) - throws IOException { + @Override + public void writePath(final Writer w, final String path, + final int rootDepth) throws IOException { final String[] a = path.split(ps); @@ -838,7 +843,8 @@ if(rootDepth!=0 && n==rootDepth) { w.write("<a href=\"" - + model.getRequestURL(new URLQueryParam[] { new URLQueryParam(URLQueryModel.PATH, prefix) }) + "\">"); + + model.getRequestURL(new URLQueryParam[] { new URLQueryParam( + URLQueryModel.PATH, prefix) }) + "\">"); w.write("..."); @@ -851,8 +857,9 @@ w.write(" "); w.write("<a href=\"" - + model.getRequestURL(new URLQueryParam[] { new URLQueryParam(URLQueryModel.PATH, sb - .toString()) }) + "\">"); + + model.getRequestURL(new URLQueryParam[] { new URLQueryParam( + URLQueryModel.PATH, sb.toString()) }) + + "\">"); // current path component. w.write(cdata(name)); @@ -889,9 +896,11 @@ * if any element of <i>a</i> does not use a * {@link HistoryInstrument}. * - * @todo review use of basePeriod - this is {@link URLQueryModel#period}, right? + * @todo review use of basePeriod - this is {@link URLQueryModel#period}, + * right? */ - protected void writeHistoryTable(final Writer w, final ICounter[] a, + protected void writeHistoryTable(final Writer w, + @SuppressWarnings("rawtypes") final ICo... [truncated message content] |
From: <dme...@us...> - 2014-05-13 12:41:54
|
Revision: 8293 http://sourceforge.net/p/bigdata/code/8293 Author: dmekonnen Date: 2014-05-13 12:41:52 +0000 (Tue, 13 May 2014) Log Message: ----------- ant target name change Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb 2014-05-13 12:15:52 UTC (rev 8292) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb 2014-05-13 12:41:52 UTC (rev 8293) @@ -34,7 +34,7 @@ user 'ubuntu' group 'ubuntu' cwd "/home/ubuntu/#{node['bigdata'][:source]}" - command "ant package-brew-nss" + command "ant package-nss-brew" end execute "Extract and relocate the bigdata archive" do This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 12:15:56
|
Revision: 8292 http://sourceforge.net/p/bigdata/code/8292 Author: dmekonnen Date: 2014-05-13 12:15:52 +0000 (Tue, 13 May 2014) Log Message: ----------- adding overwrite="true" to package-nss-brew files to insure templated files are archived Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/build.xml Modified: branches/DEPLOYMENT_BRANCH_1_3_1/build.xml =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/build.xml 2014-05-13 10:36:35 UTC (rev 8291) +++ branches/DEPLOYMENT_BRANCH_1_3_1/build.xml 2014-05-13 12:15:52 UTC (rev 8292) @@ -1228,11 +1228,11 @@ todir="${dist.bin}" /> <chmod file="${dist.bin}/startNSS" perm="755" /> <copy file="${src.resources}/deployment/nss/etc/jetty.xml" - todir="${dist.var.jetty}/etc" /> + todir="${dist.var.jetty}/etc" overwrite="true" /> <copy file="${src.resources}/deployment/nss/WEB-INF/RWStore.properties" - todir="${dist.var.jetty}/WEB-INF" /> + todir="${dist.var.jetty}/WEB-INF" overwrite="true" /> <copy file="${src.resources}/deployment/nss/WEB-INF/classes/log4j.properties" - todir="${dist.var.jetty}/WEB-INF/classes" /> + todir="${dist.var.jetty}/WEB-INF/classes" overwrite="true" /> </target> @@ -1313,7 +1313,7 @@ </target> - <target name="package-brew-nss" depends="clean, stage" + <target name="package-nss-brew" depends="clean, stage" description="Create compressed tar file for Jetty based deployment via Brew and Chef installers."> <tar destfile="${bigdata.dir}/REL-NSS.${version}.tgz" This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 10:36:37
|
Revision: 8291 http://sourceforge.net/p/bigdata/code/8291 Author: dmekonnen Date: 2014-05-13 10:36:35 +0000 (Tue, 13 May 2014) Log Message: ----------- Removed Paths: ------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/README.md branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/files/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/templates/ Deleted: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/README.md =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/README.md 2014-05-13 10:34:20 UTC (rev 8290) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/README.md 2014-05-13 10:36:35 UTC (rev 8291) @@ -1,114 +0,0 @@ -systap-bigdata Cookbook -====================== -This cookbook provides [http://www.bigdata.com/bigdata/blog/](bigdata v1.3.0) under Tomcat 7 (latest), with Oracle JDK 7 (latest) within an Ubuntu 12.0.4 VM. - -Typical synopsis: - - % vagrant up - - The bigdata service is then available at: http://33.33.33.10:8080/bigdata/ - - -Requirements ------------- - -#### packages -In a stand alone context, this cookbook assumes the following resources have been installed: - -* `VirtualBox` - Virtual machine provider [http://virtualbox.org/](http://virtualbox.org/) -* `Vagrant` - Environment assembler [http://vagrantup.com/](http://vagrantup.com/) -* `Berkshelf` - The Berkshelf cookbook manager [http://berkshelf.com/](http://berkshelf.com/). - - -#### cookbook dependencies -Chef 10.14.2 or higher - has not been tested with previous versions. - -The following Opscode cookbooks are dependencies (automatically retrieved by `Berkshelf`): - -* apt -* java -* tomcat - - - -Attributes ----------- - - -#### systap-bigdata::default -<table> - <tr> - <th>Key</th> - <th>Type</th> - <th>Description</th> - <th>Default</th> - </tr> - <tr> - <td><tt>url</tt></td> - <td>String</td> - <td>where to download the bigdata.war file form</td> - <td><tt>http://sourceforge.net/projects/bigdata/ - files/bigdata/1.3.0/bigdata.war/download</tt></td> - </tr> - <tr> - <td><tt>home</tt></td> - <td>String</td> - <td>where the extracted bigdata.war contents are found</td> - <td>Default: <tt>/var/lib/tomcat7/webapps/bigdata</tt></td> - </tr> - <tr> - <td><tt>etc</tt></td> - <td>String</td> - <td>Where under the tomcat root the log files and the bigdata.jni should reside. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">"Common Startup Problmems</a></td> - <td>Default: <tt>/var/lib/tomcat7/webapps/bigdata/etc</tt></td> - </tr> -</table> - - -Usage ------ -### Stand Alone Context -To bring the VM up the first time, or any future time after a `halt`, invoke from the cookbook directory: - - % vagrant up - -The cookbbok will retrieve the Ubuntu 12.04 VM, Oracle's JDK 7, Apahce's Tomcat 7 and the Bigdata WAR file. These downloads may take a significant amount of time to complete. Should a download be interupted or some other error occur, continue with: - - % vagrant provision - -Once complete, the bigdata server will be available under: - [http://33.33.33.10:8080/bigdata/](http://33.33.33.10:8080/bigdata/) - -To halt the VM: - - % vagrant halt - -To delete the VM and from VirtualBox: - - % vagrant destroy - -To login into the VM: - - % vagrant ssh - - -### Cookbook Context - -To use as a reciple in new cookbook, just include `systap-bigdata` in your node's `run_list` in the standard way: - -``` - "run_list": [ - "recipe[systap-bigdata::default]", - ... - ] - -``` - - -License and Authors -------------------- -Author:: Daniel Mekonnen [daniel<no-spam-at>systap.com] - -``` -This pakcage may be resiributed under the same terms and conditions as the Bigdata project that it is a part of. -``` Deleted: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb 2014-05-13 10:34:20 UTC (rev 8290) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb 2014-05-13 10:36:35 UTC (rev 8291) @@ -1,12 +0,0 @@ -name 'bigdata' -maintainer 'Daniel Mekonnen' -maintainer_email 'daniel<no-spam-at>systap.com' -license 'All rights reserved' -description 'Installs/Configures bigdata' -long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) -version '0.1.1' -depends 'apt' -depends 'java', '>= 1.22.0' -depends 'ant' -depends 'tomcat' -depends 'subversion' This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-13 10:34:22
|
Revision: 8290 http://sourceforge.net/p/bigdata/code/8290 Author: dmekonnen Date: 2014-05-13 10:34:20 +0000 (Tue, 13 May 2014) Log Message: ----------- separating chef from vagrant Added Paths: ----------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/java7.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/mapgraph.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/tomcat.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/templates/ Removed Paths: ------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/default.rb Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md (from rev 8231, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/README.md) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/README.md 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,114 @@ +systap-bigdata Cookbook +====================== +This cookbook provides [http://www.bigdata.com/bigdata/blog/](bigdata v1.3.0) under Tomcat 7 (latest), with Oracle JDK 7 (latest) within an Ubuntu 12.0.4 VM. + +Typical synopsis: + + % vagrant up + + The bigdata service is then available at: http://33.33.33.10:8080/bigdata/ + + +Requirements +------------ + +#### packages +In a stand alone context, this cookbook assumes the following resources have been installed: + +* `VirtualBox` - Virtual machine provider [http://virtualbox.org/](http://virtualbox.org/) +* `Vagrant` - Environment assembler [http://vagrantup.com/](http://vagrantup.com/) +* `Berkshelf` - The Berkshelf cookbook manager [http://berkshelf.com/](http://berkshelf.com/). + + +#### cookbook dependencies +Chef 10.14.2 or higher - has not been tested with previous versions. + +The following Opscode cookbooks are dependencies (automatically retrieved by `Berkshelf`): + +* apt +* java +* tomcat + + + +Attributes +---------- + + +#### systap-bigdata::default +<table> + <tr> + <th>Key</th> + <th>Type</th> + <th>Description</th> + <th>Default</th> + </tr> + <tr> + <td><tt>url</tt></td> + <td>String</td> + <td>where to download the bigdata.war file form</td> + <td><tt>http://sourceforge.net/projects/bigdata/ + files/bigdata/1.3.0/bigdata.war/download</tt></td> + </tr> + <tr> + <td><tt>home</tt></td> + <td>String</td> + <td>where the extracted bigdata.war contents are found</td> + <td>Default: <tt>/var/lib/tomcat7/webapps/bigdata</tt></td> + </tr> + <tr> + <td><tt>etc</tt></td> + <td>String</td> + <td>Where under the tomcat root the log files and the bigdata.jni should reside. Discussed in <a href="http://sourceforge.net/apps/mediawiki/bigdata/index.php?title=NanoSparqlServer#Common_Startup_Problems">"Common Startup Problmems</a></td> + <td>Default: <tt>/var/lib/tomcat7/webapps/bigdata/etc</tt></td> + </tr> +</table> + + +Usage +----- +### Stand Alone Context +To bring the VM up the first time, or any future time after a `halt`, invoke from the cookbook directory: + + % vagrant up + +The cookbbok will retrieve the Ubuntu 12.04 VM, Oracle's JDK 7, Apahce's Tomcat 7 and the Bigdata WAR file. These downloads may take a significant amount of time to complete. Should a download be interupted or some other error occur, continue with: + + % vagrant provision + +Once complete, the bigdata server will be available under: + [http://33.33.33.10:8080/bigdata/](http://33.33.33.10:8080/bigdata/) + +To halt the VM: + + % vagrant halt + +To delete the VM and from VirtualBox: + + % vagrant destroy + +To login into the VM: + + % vagrant ssh + + +### Cookbook Context + +To use as a reciple in new cookbook, just include `systap-bigdata` in your node's `run_list` in the standard way: + +``` + "run_list": [ + "recipe[systap-bigdata::default]", + ... + ] + +``` + + +License and Authors +------------------- +Author:: Daniel Mekonnen [daniel<no-spam-at>systap.com] + +``` +This pakcage may be resiributed under the same terms and conditions as the Bigdata project that it is a part of. +``` Deleted: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-06 13:54:41 UTC (rev 8207) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -1,6 +0,0 @@ -default['tomcat']['base_version'] = 7 -default['systap-bigdata'][:url] = "http://sourceforge.net/projects/bigdata/files/bigdata/1.3.0/bigdata.war/download" - -webapp_dir = node['tomcat']['webapp_dir'] -default['systap-bigdata'][:home] = webapp_dir + "/bigdata" -default['systap-bigdata'][:etc] = webapp_dir + "/bigdata/etc" Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb (from rev 8286, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/attributes/default.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,88 @@ + +default['bigdata'][:home] = "/var/lib/bigdata" + +# Who runs bigdata? +default['bigdata'][:user] = "bigdata" +default['bigdata'][:group] = "bigdata" + +default['bigdata'][:properties] = default['bigdata'][:home] + "/RWStore.properties" + +default['bigdata'][:source] = "bigdata-code" + +case node['bigdata'][:install_type] +when "nss" + default['bigdata'][:url] = "http://bigdata.com/deploy/bigdata-1.3.0.tgz" + + # Where the jetty resourceBase is defined: + default['bigdata'][:jetty_dir] = node['bigdata'][:home] + "/var/jetty" + + # Where the log files will live: + default['bigdata'][:log_dir] = node['bigdata'][:home] + "/var/log" + + # Where the bigdata-ha.jnl file will live: + default['bigdata'][:data_dir] = node['bigdata'][:home] + "/var/data" + + if node['bigdata'][:build_from_svn] + default['bigdata'][:svn_branch] = "https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1" + end +when "tomcat" + default['tomcat'][:base_version] = 7 + default['tomcat'][:java_options] = "-Djava.awt.headless=true -server -Xmx4G -XX:+UseG1GC" + + default['bigdata'][:url] = "http://softlayer-dal.dl.sourceforge.net/project/bigdata/bigdata/1.3.0/bigdata.war" + + default['bigdata'][:web_home] = default['tomcat'][:webapp_dir] + "/bigdata" + default['bigdata'][:web_xml] = default['bigdata'][:web_home] + "/WEB-INF/web.xml" + default['bigdata'][:log4j_properties] = default['bigdata'][:web_home] + "/WEB-INF/classes/log4j.properties" + + # Where the bigdata-ha.jnl file will live: + default['bigdata'][:data_dir] = node['bigdata'][:home] + "/data" + + # Where the log files will live: + default['bigdata'][:log_dir] = node['bigdata'][:home] + "/log" + + if node['bigdata'][:build_from_svn] + default['bigdata'][:svn_branch] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" + end +end + + +############################################################## +# +# Set the RWStore.properties attributes that apply for all +# installation scenarios. +# +############################################################## + + +default['bigdata']['journal.AbstractJournal.bufferMode'] = "DiskRW" + +# Setup for the RWStore recycler rather than session protection. +default['bigdata']['service.AbstractTransactionService.minReleaseAge']= "1" + +default['bigdata']['btree.writeRetentionQueue.capacity'] = "4000" +default['bigdata']['btree.BTree.branchingFactor'] = "128" + +# 200M initial extent. +default['bigdata']['journal.AbstractJournal.initialExtent'] = "209715200" +default['bigdata']['journal.AbstractJournal.maximumExtent'] = "209715200" + +## +## Setup for QUADS mode without the full text index. +## +default['bigdata']['rdf.sail.truthMaintenance'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.quads'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.statementIdentifiers'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.textIndex'] = "false" +default['bigdata']['rdf.store.AbstractTripleStore.axiomsClass'] = "com.bigdata.rdf.axioms.NoAxioms" + +# Bump up the branching factor for the lexicon indices on the default kb. +default['bigdata']['namespace.kb.lex.com.bigdata.btree.BTree.branchingFactor'] = "400" + +# Bump up the branching factor for the statement indices on the default kb. +default['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor'] = "1024" +default['bigdata']['rdf.sail.bufferCapacity'] = "100000" +# default['bigdata']['rdf.store.AbstractTripleStore.vocabularyClass'] = "" + +default['mapgraph'][:source] = "mapgraph-code" +default['mapgraph'][:svn_branch] = "https://svn.code.sf.net/p/mpgraph/code/trunk" Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb (from rev 8237, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/metadata.rb) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/metadata.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,12 @@ +name 'bigdata' +maintainer 'Daniel Mekonnen' +maintainer_email 'daniel<no-spam-at>systap.com' +license 'All rights reserved' +description 'Installs/Configures bigdata' +long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) +version '0.1.1' +depends 'apt' +depends 'java', '>= 1.22.0' +depends 'ant' +depends 'tomcat' +depends 'subversion' Deleted: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/default.rb 2014-05-06 13:54:41 UTC (rev 8207) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/default.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -1,60 +0,0 @@ -# -# Cookbook Name:: systap-bigdata -# Recipe:: default -# -# Copyright 2013, Systap -# -# -include_recipe "tomcat" - -# -# Install the WAR file: -# -remote_file "#{node['tomcat']['webapp_dir']}/bigdata.war" do - source node['systap-bigdata'][:url] - owner node['tomcat']['user'] - group node['tomcat']['group'] -end - - -# -# Configuration for Tomcat on Ubuntu: -# -directory node['systap-bigdata'][:etc] do - owner node['tomcat']['user'] - group node['tomcat']['group'] - mode 00755 - action :create - # - # This is a little hackish. We need to wait for tomcat to extract the bigdata.war file before we can modify - # resources within the bigdata folder. We'll attempt to update this to use the chef notification system later. - # - retries 3 - retry_delay 10 -end - -execute "set absolute path for RWStore.properties" do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF" - command "sed -i 's|<param-value>../webapps/bigdata/RWStore.properties|<param-value>#{node['systap-bigdata'][:home]}/RWStore.properties|' web.xml" -end - -execute "set path for bigdata.jnl file" do - cwd "#{node['systap-bigdata'][:home]}" - command "sed -i 's|=bigdata.jnl|=#{node['systap-bigdata'][:etc]}/bigdata.jnl|' RWStore.properties" -end - - -execute "set ruleLog in log4j.properties " do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF/classes" - command "sed -i 's|log4j.appender.queryLog.File=queryLog.csv|log4j.appender.queryLog.File=#{node['systap-bigdata'][:etc]}/queryLog.csv|' log4j.properties" -end - -execute "set ruleLog in log4j.properties " do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF/classes" - command "sed -i 's|log4j.appender.ruleLog.File=rules.log|log4j.appender.ruleLog.File=#{node['systap-bigdata'][:etc]}/rules.log|' log4j.properties" -end - -execute "set ruleLog in log4j.properties " do - cwd "#{node['systap-bigdata'][:home]}/WEB-INF/classes" - command "sed -i 's|log4j.appender.queryRunStateLog.File=queryRunStateLog.csv|log4j.appender.queryRunStateLog.File=#{node['systap-bigdata'][:etc]}/queryRunStateLog.csv|' log4j.properties" -end Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/java7.rb (from rev 8232, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/java7.rb) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/java7.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/java7.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,32 @@ +# http://jamie.mccrindle.org/2013/07/installing-oracle-java-7-using-chef.html +# +# Cookbook Name:: java7 +# Recipe:: default +# + +apt_repository "webupd8team" do + uri "http://ppa.launchpad.net/webupd8team/java/ubuntu" + components ['main'] + distribution node['lsb']['codename'] + keyserver "keyserver.ubuntu.com" + key "EEA14886" + deb_src true +end + +execute "remove openjdk-6" do + command "apt-get -y remove --purge openjdk-6-jdk openjdk-6-jre openjdk-6-jre-headless openjdk-6-jre-lib" +end + + +# could be improved to run only on update +execute "accept-license" do + command "echo oracle-java7-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections" +end + +package "oracle-java7-installer" do + action :install +end + +package "oracle-java7-set-default" do + action :install +end Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/mapgraph.rb (from rev 8285, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/mapgraph.rb) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/mapgraph.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/mapgraph.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,33 @@ +# +# Cookbook Name:: systap-bigdata +# Recipe:: default +# +# Copyright 2013, Systap +# +# +execute "pull mapgraph from svn repo" do + user 'ubuntu' + group 'ubuntu' + cwd "/home/ubuntu" + command "svn checkout #{default['mapgraph'][:svn_branch]} #{node['mapgraph'][:source]}" +end + +execute "make mapgraph" do + cwd node['mapgraph'][:source] + command "make" +end + +execute "test mapgraph" do + cwd node['mapgraph'][:source] + command "./Algorithms/SSSP/SSSP -g smallRegressionGraphs/small.mtx" +end + + +# +# "recursive true" did not work here +# +# directory node['bigdata'][:mapgraph_home] do +# owner 'ec2-user' +# group 'ec2-user' +# recursive true +# end Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb (from rev 8286, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/nss.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,117 @@ +include_recipe "java" +# +# Only do the following for tomcat install +# +if node['bigdata'][:install_type] == "nss" + + group node['bigdata'][:group] do + action :create + append true + end + + user node['bigdata'][:user] do + gid node['bigdata'][:group] + supports :manage_home => true + shell "/bin/false" + home node['bigdata'][:home] + system true + action :create + end + + + if node['bigdata'][:build_from_svn] + include_recipe "ant" + include_recipe "subversion" + + execute "checkout bigdata from svn repo" do + user 'ubuntu' + group 'ubuntu' + cwd "/home/ubuntu" + command "svn checkout #{node['bigdata'][:svn_branch]} #{node['bigdata'][:source]}" + end + + execute "build the nss tar ball" do + user 'ubuntu' + group 'ubuntu' + cwd "/home/ubuntu/#{node['bigdata'][:source]}" + command "ant package-brew-nss" + end + + execute "Extract and relocate the bigdata archive" do + cwd "/var/lib" + command "tar xvf /home/ubuntu/#{node['bigdata'][:source]}/REL-NSS.bigdata-1.*.tgz" + end + else + # + # Retrieve the package prepared for Brew: + # + remote_file "/tmp/bigdata.tgz" do + owner node['bigdata'][:user] + group node['bigdata'][:group] + source node['bigdata'][:url] + end + + execute "Extract and relocate the bigdata archive" do + cwd "/var/lib" + command "tar xvf /tmp/bigdata.tgz" + end + + # + # Rename "bigbdata" to "bigdataNSS" for now. This block can be removed in the 1.3.1 update for the Brew package + # + execute "Extract and relocate the bigdata archive" do + cwd "#{node['bigdata'][:home]}/bin" + command "mv bigdata bigdataNSS" + end + end + + execute "change the ownership of the bigdata home directory to bigdata, which strangely is not" do + user "root" + group "root" + cwd node['bigdata'][:home] + command "chown -R #{node['bigdata'][:user]}:#{node['bigdata'][:group]} ." + end + + link "/etc/init.d/bigdataNSS" do + to "#{node['bigdata'][:home]}/bin/bigdataNSS" + end + + # + # We shell out to make template substitutions + # + execute "set the INSTALL_TYPE in bin/bigdata" do + cwd "#{node['bigdata'][:home]}/bin" + command "sed -i 's|<%= INSTALL_TYPE %>|#{node['bigdata'][:install_type]}|' bigdataNSS" + end + + execute "set the BD_HOME in bin/bigdata" do + cwd "#{node['bigdata'][:home]}/bin" + command "sed -i 's|<%= BD_HOME %>|#{node['bigdata'][:home]}|' bigdataNSS" + end + + execute "set the BD_HOME in RWStore.properties" do + cwd "#{node['bigdata'][:jetty_dir]}/WEB-INF" + command "sed -i 's|<%= BD_HOME %>|#{node['bigdata'][:home]}|' RWStore.properties" + end + + execute "set the BD_HOME in log4j.properties" do + cwd "#{node['bigdata'][:jetty_dir]}/WEB-INF/classes" + command "sed -i 's|<%= BD_HOME %>|#{node['bigdata'][:home]}|' log4j.properties" + end + + execute "set the JETTY_DIR in jetty.xml" do + cwd "#{node['bigdata'][:jetty_dir]}/etc/" + command "sed -i 's|<%= JETTY_DIR %>|#{node['bigdata'][:jetty_dir]}|' jetty.xml" + end + + service "bigdataNSS" do + # + # Reenable this when the bin/bigdata script is updated to return a "1" for a successful status: + # + # See: http://comments.gmane.org/gmane.comp.sysutils.chef.user/2723 + # + # supports :status => true, :start => true, :stop => true, :restart => true + supports :start => true, :stop => true, :restart => true + action [ :enable, :start ] + end +end Copied: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/tomcat.rb (from rev 8285, branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/tomcat.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/chef/recipes/tomcat.rb 2014-05-13 10:34:20 UTC (rev 8290) @@ -0,0 +1,158 @@ +# +# Cookbook Name:: bigdata +# Recipe:: default +# +include_recipe "java" +include_recipe "tomcat" + +# include_attributes "bigdata::tomcat" + +# +# Only do the following for tomcat install +# +if node['bigdata'][:install_type] == "tomcat" + # + # The tomcat cookbook provides an /etc/default/tomcat7 file that contains multiple JAVA_OPTS lines but allows you to + # modify only one of them during installation. As a consequence JAVA_OPTS conflicts may occur. We comment out the + # 2nd JAVA_OPTS line to avoid the potential for any conflicts (which do occur with our default java_options attribute). + # + # Conflicting collector combinations in option list; please refer to the release notes for the combinations allowed + # Error: Could not create the Java Virtual Machine. + # + execute "comment out 2nd JAVA_OPTS line in /etc/default/tomcat7" do + cwd "/etc/default" + command "sed -i 's|JAVA_OPTS=\"${JAVA_OPTS} -XX:+UseConcMarkSweepGC\"|#JAVA_OPTS=\"${JAVA_OPTS} -XX:+UseConcMarkSweepGC\"|' tomcat7" + end + + + if node['bigdata'][:build_from_svn] + include_recipe "ant" + include_recipe "subversion" + + execute "checkout bigdata from svn repo" do + user 'ubuntu' + group 'ubuntu' + cwd "/home/ubuntu" + command "svn checkout #{node['bigdata'][:svn_branch]} #{node['bigdata'][:source]}" + end + + execute "build the war file" do + user 'ubuntu' + group 'ubuntu' + cwd "/home/ubuntu/#{node['bigdata'][:source]}" + command "ant war" + end + + # + # Install the WAR file: + # + remote_file "#{node['tomcat'][:webapp_dir]}/bigdata.war" do + source "file:///home/ubuntu/#{node['bigdata'][:source]}/ant-build/bigdata.war" + owner node['tomcat'][:user] + group node['tomcat'][:group] + end + + else + # + # Install the WAR file: + # + remote_file "#{node['tomcat'][:webapp_dir]}/bigdata.war" do + source node['bigdata'][:url] + owner node['tomcat'][:user] + group node['tomcat'][:group] + end + end + + # + # Create the JNL home directory + # + directory node['bigdata'][:data_dir] do + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00755 + action :create + recursive true + end + + + # + # Create the Bigdata log home + # + directory node['bigdata'][:log_dir] do + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00755 + action :create + recursive true + end + + + # + # Install the RWStore.properties file: + # + template node['bigdata'][:properties] do + source "RWStore.properties.erb" + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00644 + end + + + # + # Install the log4j.properties file: + # + template node['bigdata'][:log4j_properties] do + source "log4j.properties.erb" + owner node['tomcat'][:user] + group node['tomcat'][:group] + mode 00644 + retry_delay 15 + retries 3 + end + + + # + # Delete all log files so that the error and warning messages that appeared during the installation + # process do not unnecessarily alarm anyone. + # + execute "remove log files before retart" do + cwd "#{node['tomcat'][:log_dir]}" + command "rm *" + end + + + # + # The RWStore.properties path is the only property that needs to be adjusted in the web.xml file. + # Using a sed command to adjust the property avoids the need to maintain a web.xml template which + # in turn updates frequently relative to the other property files. Thus this recipe becomes + # suitable against a larger range of bigdata releases. + # + if node['bigdata'][:build_from_svn] + execute "set absolute path for RWStore.properties" do + cwd "#{node['bigdata'][:web_home]}/WEB-INF" + command "sed -i 's|<param-value>../webapps/bigdata/WEB-INF/RWStore.properties|<param-value>#{node['bigdata'][:home]}/RWStore.properties|' web.xml" + end + + # + # Remove original RWStore.properties file to avoid user confusion + # + file "#{node['bigdata'][:web_home]}/WEB-INF/RWStore.properties" do + action :delete + end + else + # + # 1.3.0 uses a different path for RWStore.properties. We can remove this if block in 1.3.1 + # + execute "set absolute path for RWStore.properties" do + cwd "#{node['bigdata'][:web_home]}/WEB-INF" + command "sed -i 's|<param-value>../webapps/bigdata/RWStore.properties|<param-value>#{node['bigdata'][:home]}/RWStore.properties|' web.xml" + end + + # + # Remove original RWStore.properties file to avoid user confusion + # + file "#{node['bigdata'][:web_home]}/RWStore.properties" do + action :delete + end + end +end This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2014-05-12 22:24:24
|
Revision: 8289 http://sourceforge.net/p/bigdata/code/8289 Author: mrpersonick Date: 2014-05-12 22:24:18 +0000 (Mon, 12 May 2014) Log Message: ----------- get all the blueprints code back in Modified Paths: -------------- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/BlueprintsServlet.java branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java branches/BLUEPRINTS/build.xml Added Paths: ----------- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphFactory.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphQuery.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataPredicate.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataRDFFactory.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataVertex.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BlueprintsRDFFactory.java branches/BLUEPRINTS/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphClient.java branches/BLUEPRINTS/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphEmbedded.java branches/BLUEPRINTS/bigdata-blueprints/src/test/com/bigdata/blueprints/graph-example-1.xml Removed Paths: ------------- branches/BLUEPRINTS/bigdata-blueprints/lib/avro-1.7.6.jar branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphBulkLoad.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphClient.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphEmbedded.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraphFactory.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataRDFFactory.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataVertex.java branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BlueprintsRDFFactory.java branches/BLUEPRINTS/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphClient.java branches/BLUEPRINTS/bigdata-blueprints/src/test/com/bigdata/blueprints/TestBigdataGraphEmbedded.java branches/BLUEPRINTS/bigdata-blueprints/src/test/com/bigdata/blueprints/graph-example-1.xml Deleted: branches/BLUEPRINTS/bigdata-blueprints/lib/avro-1.7.6.jar =================================================================== (Binary files differ) Deleted: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java =================================================================== --- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-12 19:06:43 UTC (rev 8288) +++ branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-12 22:24:18 UTC (rev 8289) @@ -1,107 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.blueprints; - -import java.util.Arrays; -import java.util.List; - -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.vocabulary.RDFS; - -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Vertex; - -/** - * Edge implementation that wraps an Edge statement and points to a - * {@link BigdataGraph} instance. - * - * @author mikepersonick - * - */ -public class BigdataEdge extends BigdataElement implements Edge { - - private static final List<String> blacklist = Arrays.asList(new String[] { - "id", "", "label" - }); - - protected final Statement stmt; - - public BigdataEdge(final Statement stmt, final BigdataGraph graph) { - super(stmt.getPredicate(), graph); - - this.stmt = stmt; - } - - @Override - public Object getId() { - return graph.factory.fromEdgeURI(uri); - } - - @Override - public void remove() { - graph.removeEdge(this); - } - - @Override - public String getLabel() { - return (String) graph.getProperty(uri, RDFS.LABEL); - } - - @Override - public Vertex getVertex(final Direction dir) throws IllegalArgumentException { - - if (dir == Direction.BOTH) { - throw new IllegalArgumentException(); - } - - final URI uri = (URI) - (dir == Direction.OUT ? stmt.getSubject() : stmt.getObject()); - - final String id = graph.factory.fromVertexURI(uri); - - return graph.getVertex(id); - - } - - @Override - public void setProperty(final String property, final Object val) { - - if (property == null || blacklist.contains(property)) { - throw new IllegalArgumentException(); - } - - super.setProperty(property, val); - - } - - @Override - public String toString() { - final URI s = (URI) stmt.getSubject(); - final URI p = (URI) stmt.getPredicate(); - final URI o = (URI) stmt.getObject(); - return "e["+p.getLocalName()+"]["+s.getLocalName()+"->"+o.getLocalName()+"]"; - } - -} Added: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java =================================================================== --- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java (rev 0) +++ branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java 2014-05-12 22:24:18 UTC (rev 8289) @@ -0,0 +1,115 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.Arrays; +import java.util.List; + +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.vocabulary.RDFS; + +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Vertex; + +/** + * Edge implementation that wraps an Edge statement and points to a + * {@link BigdataGraph} instance. + * + * @author mikepersonick + * + */ +public class BigdataEdge extends BigdataElement implements Edge { + + private static final List<String> blacklist = Arrays.asList(new String[] { + "id", "", "label" + }); + + protected final Statement stmt; + + public BigdataEdge(final Statement stmt, final BigdataGraph graph) { + super(stmt.getPredicate(), graph); + + this.stmt = stmt; + } + + @Override + public Object getId() { + + return graph.factory.fromEdgeURI(uri); + + } + + @Override + public void remove() { + + graph.removeEdge(this); + + } + + @Override + public String getLabel() { + + return (String) graph.getProperty(uri, RDFS.LABEL); + + } + + @Override + public Vertex getVertex(final Direction dir) throws IllegalArgumentException { + + if (dir == Direction.BOTH) { + throw new IllegalArgumentException(); + } + + final URI uri = (URI) + (dir == Direction.OUT ? stmt.getSubject() : stmt.getObject()); + + final String id = graph.factory.fromVertexURI(uri); + + return graph.getVertex(id); + + } + + @Override + public void setProperty(final String prop, final Object val) { + + if (prop == null || blacklist.contains(prop)) { + throw new IllegalArgumentException(); + } + + super.setProperty(prop, val); + + } + + @Override + public String toString() { + + final URI s = (URI) stmt.getSubject(); + final URI p = (URI) stmt.getPredicate(); + final URI o = (URI) stmt.getObject(); + return "e["+p.getLocalName()+"]["+s.getLocalName()+"->"+o.getLocalName()+"]"; + + } + +} Property changes on: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataEdge.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Deleted: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java =================================================================== --- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-12 19:06:43 UTC (rev 8288) +++ branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-12 22:24:18 UTC (rev 8289) @@ -1,134 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.blueprints; - -import java.util.Arrays; -import java.util.List; -import java.util.Set; - -import org.openrdf.model.Literal; -import org.openrdf.model.URI; - -import com.tinkerpop.blueprints.Element; - -/** - * Base class for {@link BigdataVertex} and {@link BigdataEdge}. Handles - * property-related methods. - * - * @author mikepersonick - * - */ -public abstract class BigdataElement implements Element { - - private static final List<String> blacklist = Arrays.asList(new String[] { - "id", "" - }); - - protected final URI uri; - protected final BigdataGraph graph; - - public BigdataElement(final URI uri, final BigdataGraph graph) { - this.uri = uri; - this.graph = graph; - } - - @Override - @SuppressWarnings("unchecked") - public <T> T getProperty(final String property) { - - final URI p = graph.factory.toPropertyURI(property); - - return (T) graph.getProperty(uri, p); - - } - - @Override - public Set<String> getPropertyKeys() { - - return graph.getPropertyKeys(uri); - - } - - @Override - @SuppressWarnings("unchecked") - public <T> T removeProperty(final String property) { - - final URI p = graph.factory.toPropertyURI(property); - - return (T) graph.removeProperty(uri, p); - - } - - @Override - public void setProperty(final String property, final Object val) { - - if (property == null || blacklist.contains(property)) { - throw new IllegalArgumentException(); - } - - final URI p = graph.factory.toPropertyURI(property); - - final Literal o = graph.factory.toLiteral(val); - - graph.setProperty(uri, p, o); - - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((graph == null) ? 0 : graph.hashCode()); - result = prime * result + ((uri == null) ? 0 : uri.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - BigdataElement other = (BigdataElement) obj; - if (graph == null) { - if (other.graph != null) - return false; - } else if (!graph.equals(other.graph)) - return false; - if (uri == null) { - if (other.uri != null) - return false; - } else if (!uri.equals(other.uri)) - return false; - return true; - } - - @Override - public String toString() { - return uri.toString(); - } - - -} Added: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java =================================================================== --- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java (rev 0) +++ branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java 2014-05-12 22:24:18 UTC (rev 8289) @@ -0,0 +1,154 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +import org.openrdf.model.URI; + +import com.tinkerpop.blueprints.Element; + +/** + * Base class for {@link BigdataVertex} and {@link BigdataEdge}. Handles + * property-related methods. + * + * @author mikepersonick + * + */ +public abstract class BigdataElement implements Element { + + private static final List<String> blacklist = Arrays.asList(new String[] { + "id", "" + }); + + protected final URI uri; + protected final BigdataGraph graph; + + public BigdataElement(final URI uri, final BigdataGraph graph) { + this.uri = uri; + this.graph = graph; + } + + @Override + @SuppressWarnings("unchecked") + public <T> T getProperty(final String property) { + + return (T) graph.getProperty(uri, property); + + } + + @Override + public Set<String> getPropertyKeys() { + + return graph.getPropertyKeys(uri); + + } + + @Override + @SuppressWarnings("unchecked") + public <T> T removeProperty(final String property) { + + return (T) graph.removeProperty(uri, property); + + } + + @Override + public void setProperty(final String prop, final Object val) { + + if (prop == null || blacklist.contains(prop)) { + throw new IllegalArgumentException(); + } + + graph.setProperty(uri, prop, val); + + } + + /** + * Simple extension for multi-valued properties. + */ + public void addProperty(final String prop, final Object val) { + + if (prop == null || blacklist.contains(prop)) { + throw new IllegalArgumentException(); + } + + graph.addProperty(uri, prop, val); + + } + + /** + * Simple extension for multi-valued properties. + */ + @SuppressWarnings("unchecked") + public <T> List<T> getProperties(final String property) { + + return (List<T>) graph.getProperties(uri, property); + + } + + /** + * Generated code. + */ + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((graph == null) ? 0 : graph.hashCode()); + result = prime * result + ((uri == null) ? 0 : uri.hashCode()); + return result; + } + + /** + * Generated code. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + BigdataElement other = (BigdataElement) obj; + if (graph == null) { + if (other.graph != null) + return false; + } else if (!graph.equals(other.graph)) + return false; + if (uri == null) { + if (other.uri != null) + return false; + } else if (!uri.equals(other.uri)) + return false; + return true; + } + + @Override + public String toString() { + return uri.toString(); + } + + +} Property changes on: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataElement.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property Deleted: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-12 19:06:43 UTC (rev 8288) +++ branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-12 22:24:18 UTC (rev 8289) @@ -1,851 +0,0 @@ -/** -Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. - -Contact: - SYSTAP, LLC - 4501 Tower Road - Greensboro, NC 27410 - lic...@bi... - -This program is free software; you can redistribute it and/or modify -it under the terms of the GNU General Public License as published by -the Free Software Foundation; version 2 of the License. - -This program is distributed in the hope that it will be useful, -but WITHOUT ANY WARRANTY; without even the implied warranty of -MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -GNU General Public License for more details. - -You should have received a copy of the GNU General Public License -along with this program; if not, write to the Free Software -Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -*/ -package com.bigdata.blueprints; - -import info.aduna.iteration.CloseableIteration; - -import java.util.Iterator; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.UUID; - -import org.apache.commons.io.IOUtils; -import org.openrdf.OpenRDFException; -import org.openrdf.model.Literal; -import org.openrdf.model.Statement; -import org.openrdf.model.URI; -import org.openrdf.model.Value; -import org.openrdf.model.impl.StatementImpl; -import org.openrdf.model.impl.URIImpl; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; -import org.openrdf.query.GraphQueryResult; -import org.openrdf.query.QueryLanguage; -import org.openrdf.repository.RepositoryConnection; -import org.openrdf.repository.RepositoryResult; - -import com.bigdata.rdf.store.BD; -import com.tinkerpop.blueprints.Direction; -import com.tinkerpop.blueprints.Edge; -import com.tinkerpop.blueprints.Features; -import com.tinkerpop.blueprints.Graph; -import com.tinkerpop.blueprints.GraphQuery; -import com.tinkerpop.blueprints.Vertex; -import com.tinkerpop.blueprints.util.DefaultGraphQuery; -import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; - -/** - * A base class for a Blueprints wrapper around a bigdata back-end. - * - * @author mikepersonick - * - */ -public abstract class BigdataGraph implements Graph { - - public static final URI VERTEX = new URIImpl(BD.NAMESPACE + "Vertex"); - - public static final URI EDGE = new URIImpl(BD.NAMESPACE + "Edge"); - -// final BigdataSailRepository repo; -// -// transient BigdataSailRepositoryConnection cxn; - - final BlueprintsRDFFactory factory; - -// public BigdataGraph(final BigdataSailRepository repo) { -// this(repo, BigdataRDFFactory.INSTANCE); -// } - - public BigdataGraph(//final BigdataSailRepository repo, - final BlueprintsRDFFactory factory) { -// try { -// this.repo = repo; -// this.cxn = repo.getUnisolatedConnection(); -// this.cxn.setAutoCommit(false); - this.factory = factory; -// } catch (RepositoryException ex) { -// throw new RuntimeException(ex); -// } - } - - public String toString() { - return getClass().getSimpleName().toLowerCase(); - } - - /** - * Post a GraphML file to the remote server. (Bulk-upload operation.) - */ - public void loadGraphML(final String file) throws Exception { - GraphMLReader.inputGraph(this, file); - } - - protected abstract RepositoryConnection cxn() throws Exception; - -// public BigdataSailRepositoryConnection getConnection() { -// return this.cxn; -// } -// -// public BlueprintsRDFFactory getFactory() { -// return this.factory; -// } - -// public Value getValue(final URI s, final URI p) { -// -// try { -// -// final RepositoryResult<Statement> result = -// cxn.getStatements(s, p, null, false); -// -// if (result.hasNext()) { -// -// final Value o = result.next().getObject(); -// -// if (result.hasNext()) { -// throw new RuntimeException(s -// + ": more than one value for p: " + p -// + ", did you mean to call getValues()?"); -// } -// -// return o; -// -// } -// -// return null; -// -// } catch (Exception ex) { -// throw new RuntimeException(ex); -// } -// -// } - - public Object getProperty(final URI s, final URI p) { - - try { - - final RepositoryResult<Statement> result = - cxn().getStatements(s, p, null, false); - - if (result.hasNext()) { - - final Value value = result.next().getObject(); - - if (result.hasNext()) { - throw new RuntimeException(s - + ": more than one value for p: " + p - + ", did you mean to call getValues()?"); - } - - if (!(value instanceof Literal)) { - throw new RuntimeException("not a property: " + value); - } - - final Literal lit = (Literal) value; - - return factory.fromLiteral(lit); - - } - - return null; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - -// public List<Value> getValues(final URI s, final URI p) { -// -// try { -// -// final RepositoryResult<Statement> result = -// cxn().getStatements(s, p, null, false); -// -// final List<Value> values = new LinkedList<Value>(); -// -// while (result.hasNext()) { -// -// final Value o = result.next().getObject(); -// -// values.add(o); -// -// } -// -// return values; -// -// } catch (Exception ex) { -// throw new RuntimeException(ex); -// } -// -// } - - public List<Object> getProperties(final URI s, final URI p) { - - try { - - final RepositoryResult<Statement> result = - cxn().getStatements(s, p, null, false); - - final List<Object> props = new LinkedList<Object>(); - - while (result.hasNext()) { - - final Value value = result.next().getObject(); - - if (!(value instanceof Literal)) { - throw new RuntimeException("not a property: " + value); - } - - final Literal lit = (Literal) value; - - props.add(factory.fromLiteral(lit)); - - } - - return props; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public Set<String> getPropertyKeys(final URI s) { - - try { - - final RepositoryResult<Statement> result = - cxn().getStatements(s, null, null, false); - - final Set<String> properties = new LinkedHashSet<String>(); - - while (result.hasNext()) { - - final Statement stmt = result.next(); - - if (!(stmt.getObject() instanceof Literal)) { - continue; - } - - if (stmt.getPredicate().equals(RDFS.LABEL)) { - continue; - } - - final String p = - factory.fromPropertyURI(stmt.getPredicate()); - - properties.add(p); - - } - - return properties; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public Object removeProperty(final URI s, final URI p) { - - try { - - final Object oldVal = getProperty(s, p); - - cxn().remove(s, p, null); - - return oldVal; - - } catch (Exception e) { - throw new RuntimeException(e); - } - - } - - public void setProperty(final URI s, final URI p, final Literal o) { - - try { - - cxn().remove(s, p, null); - - cxn().add(s, p, o); - - } catch (Exception e) { - throw new RuntimeException(e); - } - - } - - @Override - public Edge addEdge(final Object key, final Vertex from, final Vertex to, - final String label) { - - if (label == null) { - throw new IllegalArgumentException(); - } - - final String eid = key != null ? key.toString() : UUID.randomUUID().toString(); - - final URI edgeURI = factory.toEdgeURI(eid); - - if (key != null) { - - final Edge edge = getEdge(key); - - if (edge != null) { - if (!(edge.getVertex(Direction.OUT).equals(from) && - (edge.getVertex(Direction.OUT).equals(to)))) { - throw new IllegalArgumentException("edge already exists: " + key); - } - } - - } - - try { - -// if (cxn().hasStatement(edgeURI, RDF.TYPE, EDGE, false)) { -// throw new IllegalArgumentException("edge " + eid + " already exists"); -// } - - final URI fromURI = factory.toVertexURI(from.getId().toString()); - final URI toURI = factory.toVertexURI(to.getId().toString()); - - cxn().add(fromURI, edgeURI, toURI); - cxn().add(edgeURI, RDF.TYPE, EDGE); - cxn().add(edgeURI, RDFS.LABEL, factory.toLiteral(label)); - - return new BigdataEdge(new StatementImpl(fromURI, edgeURI, toURI), this); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Vertex addVertex(final Object key) { - - try { - - final String vid = key != null ? - key.toString() : UUID.randomUUID().toString(); - - final URI uri = factory.toVertexURI(vid); - -// if (cxn().hasStatement(vertexURI, RDF.TYPE, VERTEX, false)) { -// throw new IllegalArgumentException("vertex " + vid + " already exists"); -// } - - cxn().add(uri, RDF.TYPE, VERTEX); - - return new BigdataVertex(uri, this); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Edge getEdge(final Object key) { - - if (key == null) - throw new IllegalArgumentException(); - - try { - - final URI edge = factory.toEdgeURI(key.toString()); - - final RepositoryResult<Statement> result = - cxn().getStatements(null, edge, null, false); - - if (result.hasNext()) { - - final Statement stmt = result.next(); - - if (result.hasNext()) { - throw new RuntimeException( - "duplicate edge: " + key); - } - - return new BigdataEdge(stmt, this); - - } - - return null; - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Iterable<Edge> getEdges() { - - final URI wild = null; - return getEdges(wild, wild); - - } - - public Iterable<Edge> getEdges(final URI s, final URI o, final String... labels) { - - try { - -// final RepositoryResult<Statement> result = -// cxn().getStatements(s, p, o, false); -// -// return new EdgeIterable(result); - - final StringBuilder sb = new StringBuilder(); - sb.append("construct { ?from ?edge ?to . } where {\n"); - sb.append("?edge rdf:type bd:Edge . ?from ?edge ?to .\n"); - if (labels != null && labels.length > 0) { - if (labels.length == 1) { - sb.append("?edge rdfs:label \"").append(labels[0]).append("\" .\n"); - } else { - sb.append("?edge rdfs:label ?label .\n"); - sb.append("filter(?label in ("); - for (String label : labels) { - sb.append("\""+label+"\", "); - } - sb.setLength(sb.length()-2); - sb.append(")) .\n"); - } - } - sb.append("}"); - - final String queryStr = sb.toString() - .replace("?from", s != null ? "<"+s+">" : "?from") - .replace("?to", o != null ? "<"+o+">" : "?to"); - - final org.openrdf.query.GraphQuery query = - cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - - final GraphQueryResult stmts = query.evaluate(); - - return new EdgeIterable(stmts); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public Iterable<Vertex> getVertices(final URI s, final URI o, - final String... labels) { - - if (s != null && o != null) { - throw new IllegalArgumentException(); - } - - if (s == null && o == null) { - throw new IllegalArgumentException(); - } - - try { - -// final RepositoryResult<Statement> result = -// cxn().getStatements(s, null, o, false); -// -// return new VertexIterable(result, s == null); - - final StringBuilder sb = new StringBuilder(); - sb.append("construct { ?from ?edge ?to . } where {\n"); - sb.append("?edge rdf:type bd:Edge . ?from ?edge ?to .\n"); - if (labels != null && labels.length > 0) { - if (labels.length == 1) { - sb.append("?edge rdfs:label \"").append(labels[0]).append("\" .\n"); - } else { - sb.append("?edge rdfs:label ?label .\n"); - sb.append("filter(?label in ("); - for (String label : labels) { - sb.append("\""+label+"\", "); - } - sb.setLength(sb.length()-2); - sb.append(")) .\n"); - } - } - sb.append("}"); - - final String queryStr = sb.toString() - .replace("?from", s != null ? "<"+s+">" : "?from") - .replace("?to", o != null ? "<"+o+">" : "?to"); - - final org.openrdf.query.GraphQuery query = - cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - - final GraphQueryResult stmts = query.evaluate(); - - return new VertexIterable(stmts, s == null); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - public final <T> Iterable<T> fuse(final Iterable<T>... args) { - - return new FusedIterable<T>(args); - } - - - @Override - public Iterable<Edge> getEdges(final String prop, final Object val) { - - final URI p = factory.toPropertyURI(prop); - final Literal o = factory.toLiteral(val); - - try { - - final String queryStr = IOUtils.toString( - getClass().getResourceAsStream("edgesByProperty.rq")) - .replace("?prop", "<"+p+">") - .replace("?val", o.toString()); - - final org.openrdf.query.GraphQuery query = - cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); - - final GraphQueryResult stmts = query.evaluate(); - - return new EdgeIterable(stmts); - - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Features getFeatures() { - - return FEATURES; - - } - - @Override - public Vertex getVertex(final Object key) { - - if (key == null) - throw new IllegalArgumentException(); - - final URI uri = factory.toVertexURI(key.toString()); - try { - if (cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { - return new BigdataVertex(uri, this); - } - return null; - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Iterable<Vertex> getVertices() { - - try { - final RepositoryResult<Statement> result = - cxn().getStatements(null, RDF.TYPE, VERTEX, false); - return new VertexIterable(result, true); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public Iterable<Vertex> getVertices(String prop, Object val) { - - final URI p = factory.toPropertyURI(prop); - final Literal o = factory.toLiteral(val); - try { - final RepositoryResult<Statement> result = - cxn().getStatements(null, p, o, false); - return new VertexIterable(result, true); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - } - - @Override - public GraphQuery query() { - return new DefaultGraphQuery(this); - } - - @Override - public void removeEdge(final Edge edge) { - try { - final URI uri = factory.toURI(edge); - if (!cxn().hasStatement(uri, RDF.TYPE, EDGE, false)) { - throw new IllegalStateException(); - } - final URI wild = null; - // remove the edge statement - cxn().remove(wild, uri, wild); - // remove its properties - cxn().remove(uri, wild, wild); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - @Override - public void removeVertex(final Vertex vertex) { - try { - final URI uri = factory.toURI(vertex); - if (!cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { - throw new IllegalStateException(); - } - final URI wild = null; - // remove outgoing links and properties - cxn().remove(uri, wild, wild); - // remove incoming links - cxn().remove(wild, wild, uri); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - -// @Override -// public void commit() { -// try { -// cxn().commit(); -// } catch (RepositoryException e) { -// throw new RuntimeException(e); -// } -// } -// -// @Override -// public void rollback() { -// try { -// cxn().rollback(); -// cxn.close(); -// cxn = repo.getUnisolatedConnection(); -// cxn.setAutoCommit(false); -// } catch (RepositoryException e) { -// throw new RuntimeException(e); -// } -// } -// -// @Override -// public void shutdown() { -// try { -// cxn.close(); -// repo.shutDown(); -// } catch (RepositoryException e) { -// throw new RuntimeException(e); -// } -// } -// -// @Override -// @Deprecated -// public void stopTransaction(Conclusion arg0) { -// } - - public class VertexIterable implements Iterable<Vertex>, Iterator<Vertex> { - - private final CloseableIteration<Statement, ? extends OpenRDFException> stmts; - - private final boolean subject; - - private final List<Vertex> cache; - - public VertexIterable( - final CloseableIteration<Statement, ? extends OpenRDFException> stmts, - final boolean subject) { - this.stmts = stmts; - this.subject = subject; - this.cache = new LinkedList<Vertex>(); - } - - @Override - public boolean hasNext() { - try { - return stmts.hasNext(); - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public Vertex next() { - try { - final Statement stmt = stmts.next(); - final URI v = (URI) - (subject ? stmt.getSubject() : stmt.getObject()); - if (!hasNext()) { - stmts.close(); - } - final Vertex vertex = new BigdataVertex(v, BigdataGraph.this); - cache.add(vertex); - return vertex; - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<Vertex> iterator() { - return hasNext() ? this : cache.iterator(); - } - - } - - public class EdgeIterable implements Iterable<Edge>, Iterator<Edge> { - - private final CloseableIteration<Statement, ? extends OpenRDFException> stmts; - - private final List<Edge> cache; - - public EdgeIterable( - final CloseableIteration<Statement, ? extends OpenRDFException> stmts) { - this.stmts = stmts; - this.cache = new LinkedList<Edge>(); - } - - @Override - public boolean hasNext() { - try { - return stmts.hasNext(); - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public Edge next() { - try { - final Statement stmt = stmts.next(); - if (!hasNext()) { - stmts.close(); - } - final Edge edge = new BigdataEdge(stmt, BigdataGraph.this); - cache.add(edge); - return edge; - } catch (OpenRDFException e) { - throw new RuntimeException(e); - } - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<Edge> iterator() { - return hasNext() ? this : cache.iterator(); - } - - } - - public class FusedIterable<T> implements Iterable<T>, Iterator<T> { - - private final Iterable<T>[] args; - - private transient int i = 0; - - private transient Iterator<T> curr; - - public FusedIterable(final Iterable<T>... args) { - this.args = args; - this.curr = args[0].iterator(); - } - - @Override - public boolean hasNext() { - if (curr.hasNext()) { - return true; - } - while (!curr.hasNext() && i < (args.length-1)) { - curr = args[++i].iterator(); - if (curr.hasNext()) { - return true; - } - } - return false; - } - - @Override - public T next() { - return curr.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - - @Override - public Iterator<T> iterator() { - return this; - } - - } - - protected static final Features FEATURES = new Features(); - - static { - - FEATURES.supportsSerializableObjectProperty = false; - FEATURES.supportsBooleanProperty = true; - FEATURES.supportsDoubleProperty = true; - FEATURES.supportsFloatProperty = true; - FEATURES.supportsIntegerProperty = true; - FEATURES.supportsPrimitiveArrayProperty = false; - FEATURES.supportsUniformListProperty = false; - FEATURES.supportsMixedListProperty = false; - FEATURES.supportsLongProperty = true; - FEATURES.supportsMapProperty = false; - FEATURES.supportsStringProperty = true; - - FEATURES.supportsDuplicateEdges = true; - FEATURES.supportsSelfLoops = true; - FEATURES.isPersistent = true; - FEATURES.isWrapper = false; - FEATURES.supportsVertexIteration = true; - FEATURES.supportsEdgeIteration = true; - FEATURES.supportsVertexIndex = false; - FEATURES.supportsEdgeIndex = false; - FEATURES.ignoresSuppliedIds = true; - FEATURES.supportsTransactions = false; - FEATURES.supportsIndices = true; - FEATURES.supportsKeyIndices = true; - FEATURES.supportsVertexKeyIndex = true; - FEATURES.supportsEdgeKeyIndex = true; - FEATURES.supportsEdgeRetrieval = true; - FEATURES.supportsVertexProperties = true; - FEATURES.supportsEdgeProperties = true; - FEATURES.supportsThreadedTransactions = false; - } - -} Added: branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java =================================================================== --- branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java (rev 0) +++ branches/BLUEPRINTS/bigdata-blueprints/src/java/com/bigdata/blueprints/BigdataGraph.java 2014-05-12 22:24:18 UTC (rev 8289) @@ -0,0 +1,1017 @@ +/** +Copyright (C) SYSTAP, LLC 2006-2014. All rights reserved. + +Contact: + SYSTAP, LLC + 4501 Tower Road + Greensboro, NC 27410 + lic...@bi... + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; version 2 of the License. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program; if not, write to the Free Software +Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +*/ +package com.bigdata.blueprints; + +import info.aduna.iteration.CloseableIteration; + +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.UUID; + +import org.openrdf.OpenRDFException; +import org.openrdf.model.Literal; +import org.openrdf.model.Statement; +import org.openrdf.model.URI; +import org.openrdf.model.Value; +import org.openrdf.model.impl.StatementImpl; +import org.openrdf.model.impl.URIImpl; +import org.openrdf.model.vocabulary.RDF; +import org.openrdf.model.vocabulary.RDFS; +import org.openrdf.query.GraphQueryResult; +import org.openrdf.query.QueryLanguage; +import org.openrdf.repository.RepositoryConnection; +import org.openrdf.repository.RepositoryResult; + +import com.bigdata.rdf.store.BD; +import com.tinkerpop.blueprints.Direction; +import com.tinkerpop.blueprints.Edge; +import com.tinkerpop.blueprints.Features; +import com.tinkerpop.blueprints.Graph; +import com.tinkerpop.blueprints.GraphQuery; +import com.tinkerpop.blueprints.Vertex; +import com.tinkerpop.blueprints.util.io.graphml.GraphMLReader; + +/** + * A base class for a Blueprints wrapper around a bigdata back-end. + * + * @author mikepersonick + * + */ +public abstract class BigdataGraph implements Graph { + + /** + * URI used to represent a Vertex. + */ + public static final URI VERTEX = new URIImpl(BD.NAMESPACE + "Vertex"); + + /** + * URI used to represent a Edge. + */ + public static final URI EDGE = new URIImpl(BD.NAMESPACE + "Edge"); + + /** + * Factory for round-tripping between Blueprints data and RDF data. + */ + final BlueprintsRDFFactory factory; + + public BigdataGraph(final BlueprintsRDFFactory factory) { + + this.factory = factory; + + } + + /** + * For some reason this is part of the specification (i.e. part of the + * Blueprints test suite). + */ + public String toString() { + + return getClass().getSimpleName().toLowerCase(); + + } + + /** + * Different implementations will return different types of connections + * depending on the mode (client/server, embedded, read-only, etc.) + */ + protected abstract RepositoryConnection cxn() throws Exception; + + /** + * Return a single-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object getProperty(final URI uri, final String prop) { + + return getProperty(uri, factory.toPropertyURI(prop)); + + } + + /** + * Return a single-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object getProperty(final URI uri, final URI prop) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(uri, prop, null, false); + + if (result.hasNext()) { + + final Value value = result.next().getObject(); + + if (result.hasNext()) { + throw new RuntimeException(uri + + ": more than one value for p: " + prop + + ", did you mean to call getProperties()?"); + } + + if (!(value instanceof Literal)) { + throw new RuntimeException("not a property: " + value); + } + + final Literal lit = (Literal) value; + + return factory.fromLiteral(lit); + + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Return a multi-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public List<Object> getProperties(final URI uri, final String prop) { + + return getProperties(uri, factory.toPropertyURI(prop)); + + } + + + /** + * Return a multi-valued property for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public List<Object> getProperties(final URI uri, final URI prop) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(uri, prop, null, false); + + final List<Object> props = new LinkedList<Object>(); + + while (result.hasNext()) { + + final Value value = result.next().getObject(); + + if (!(value instanceof Literal)) { + throw new RuntimeException("not a property: " + value); + } + + final Literal lit = (Literal) value; + + props.add(factory.fromLiteral(lit)); + + } + + return props; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Return the property names for an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Set<String> getPropertyKeys(final URI uri) { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(uri, null, null, false); + + final Set<String> properties = new LinkedHashSet<String>(); + + while (result.hasNext()) { + + final Statement stmt = result.next(); + + if (!(stmt.getObject() instanceof Literal)) { + continue; + } + + if (stmt.getPredicate().equals(RDFS.LABEL)) { + continue; + } + + final String p = + factory.fromPropertyURI(stmt.getPredicate()); + + properties.add(p); + + } + + return properties; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Remove all values for a particular property on an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object removeProperty(final URI uri, final String prop) { + + return removeProperty(uri, factory.toPropertyURI(prop)); + + } + + /** + * Remove all values for a particular property on an edge or vertex. + * + * @see {@link BigdataElement} + */ + public Object removeProperty(final URI uri, final URI prop) { + + try { + + final Object oldVal = getProperty(uri, prop); + + cxn().remove(uri, prop, null); + + return oldVal; + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Set a single-value property on an edge or vertex (remove the old + * value first). + * + * @see {@link BigdataElement} + */ + public void setProperty(final URI uri, final String prop, final Object val) { + + setProperty(uri, factory.toPropertyURI(prop), factory.toLiteral(val)); + + } + + /** + * Set a single-value property on an edge or vertex (remove the old + * value first). + * + * @see {@link BigdataElement} + */ + public void setProperty(final URI uri, final URI prop, final Literal val) { + + try { + + cxn().remove(uri, prop, null); + + cxn().add(uri, prop, val); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Add a property on an edge or vertex (multi-value property extension). + * + * @see {@link BigdataElement} + */ + public void addProperty(final URI uri, final String prop, final Object val) { + + setProperty(uri, factory.toPropertyURI(prop), factory.toLiteral(val)); + + } + + /** + * Add a property on an edge or vertex (multi-value property extension). + * + * @see {@link BigdataElement} + */ + public void addProperty(final URI uri, final URI prop, final Literal val) { + + try { + + cxn().add(uri, prop, val); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Post a GraphML file to the remote server. (Bulk-upload operation.) + */ + public void loadGraphML(final String file) throws Exception { + + GraphMLReader.inputGraph(this, file); + + } + + /** + * Add an edge. + */ + @Override + public Edge addEdge(final Object key, final Vertex from, final Vertex to, + final String label) { + + if (label == null) { + throw new IllegalArgumentException(); + } + + final String eid = key != null ? key.toString() : UUID.randomUUID().toString(); + + final URI edgeURI = factory.toEdgeURI(eid); + + if (key != null) { + + final Edge edge = getEdge(key); + + if (edge != null) { + if (!(edge.getVertex(Direction.OUT).equals(from) && + (edge.getVertex(Direction.OUT).equals(to)))) { + throw new IllegalArgumentException("edge already exists: " + key); + } + } + + } + + try { + + // do we need to check this? +// if (cxn().hasStatement(edgeURI, RDF.TYPE, EDGE, false)) { +// throw new IllegalArgumentException("edge " + eid + " already exists"); +// } + + final URI fromURI = factory.toVertexURI(from.getId().toString()); + final URI toURI = factory.toVertexURI(to.getId().toString()); + + cxn().add(fromURI, edgeURI, toURI); + cxn().add(edgeURI, RDF.TYPE, EDGE); + cxn().add(edgeURI, RDFS.LABEL, factory.toLiteral(label)); + + return new BigdataEdge(new StatementImpl(fromURI, edgeURI, toURI), this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Add a vertex. + */ + @Override + public Vertex addVertex(final Object key) { + + try { + + final String vid = key != null ? + key.toString() : UUID.randomUUID().toString(); + + final URI uri = factory.toVertexURI(vid); + + // do we need to check this? +// if (cxn().hasStatement(vertexURI, RDF.TYPE, VERTEX, false)) { +// throw new IllegalArgumentException("vertex " + vid + " already exists"); +// } + + cxn().add(uri, RDF.TYPE, VERTEX); + + return new BigdataVertex(uri, this); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Lookup an edge. + */ + @Override + public Edge getEdge(final Object key) { + + if (key == null) + throw new IllegalArgumentException(); + + try { + + final URI edge = factory.toEdgeURI(key.toString()); + + final RepositoryResult<Statement> result = + cxn().getStatements(null, edge, null, false); + + if (result.hasNext()) { + + final Statement stmt = result.next(); + + if (result.hasNext()) { + throw new RuntimeException( + "duplicate edge: " + key); + } + + return new BigdataEdge(stmt, this); + + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Iterate all edges. + */ + @Override + public Iterable<Edge> getEdges() { + + final URI wild = null; + return getEdges(wild, wild); + + } + + /** + * Find edges based on the from and to vertices and the edge labels, all + * optional parameters (can be null). The edge labels can be null to include + * all labels. + * <p> + * + * @param from + * the from vertex (null for wildcard) + * @param to + * the to vertex (null for wildcard) + * @param labels + * the edge labels to consider (optional) + * @return the edges matching the supplied criteria + */ + Iterable<Edge> getEdges(final URI from, final URI to, final String... labels) { + + final GraphQueryResult stmts = getElements(from, to, labels); + + return new EdgeIterable(stmts); + + } + + /** + * Translates the request to a high-performance SPARQL query: + * + * construct { + * ?from ?edge ?to . + * } where { + * ?edge rdf:type <Edge> . + * + * ?from ?edge ?to . + * + * # filter by edge label + * ?edge rdfs:label ?label . + * filter(?label in ("label1", "label2", ...)) . + * } + */ + protected GraphQueryResult getElements(final URI from, final URI to, + final String... labels) { + + final StringBuilder sb = new StringBuilder(); + sb.append("construct { ?from ?edge ?to . } where {\n"); + sb.append(" ?edge rdf:type bd:Edge .\n"); + sb.append(" ?from ?edge ?to .\n"); + if (labels != null && labels.length > 0) { + if (labels.length == 1) { + sb.append(" ?edge rdfs:label \"").append(labels[0]).append("\" .\n"); + } else { + sb.append(" ?edge rdfs:label ?label .\n"); + sb.append(" filter(?label in ("); + for (String label : labels) { + sb.append("\""+label+"\", "); + } + sb.setLength(sb.length()-2); + sb.append(")) .\n"); + } + } + sb.append("}"); + + // bind the from and/or to + final String queryStr = sb.toString() + .replace("?from", from != null ? "<"+from+">" : "?from") + .replace("?to", to != null ? "<"+to+">" : "?to"); + + try { + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return stmts; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find edges based on a SPARQL construct query. The query MUST construct + * edge statements: + * <p> + * construct { ?from ?edge ?to } where { ... } + * + * @see {@link BigdataGraphQuery} + */ + Iterable<Edge> getEdges(final String queryStr) { + + try { + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new EdgeIterable(stmts); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find vertices based on the supplied from and to vertices and the edge + * labels. One or the other (from and to) must be null (wildcard), but not + * both. Use getEdges() for wildcards on both the from and to. The edge + * labels can be null to include all labels. + * + * @param from + * the from vertex (null for wildcard) + * @param to + * the to vertex (null for wildcard) + * @param labels + * the edge labels to consider (optional) + * @return + * the vertices matching the supplied criteria + */ + Iterable<Vertex> getVertices(final URI from, final URI to, + final String... labels) { + + if (from != null && to != null) { + throw new IllegalArgumentException(); + } + + if (from == null && to == null) { + throw new IllegalArgumentException(); + } + + final GraphQueryResult stmts = getElements(from, to, labels); + + return new VertexIterable(stmts, from == null); + + } + + /** + * Find vertices based on a SPARQL construct query. If the subject parameter + * is true, the vertices will be taken from the subject position of the + * constructed statements, otherwise they will be taken from the object + * position. + * + * @see {@link BigdataGraphQuery} + */ + Iterable<Vertex> getVertices(final String queryStr, final boolean subject) { + + try { + + final org.openrdf.query.GraphQuery query = + cxn().prepareGraphQuery(QueryLanguage.SPARQL, queryStr); + + final GraphQueryResult stmts = query.evaluate(); + + return new VertexIterable(stmts, subject); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find edges with the supplied property value. + * + * construct { + * ?from ?edge ?to . + * } + * where { + * ?edge <prop> <val> . + * ?from ?edge ?to . + * } + */ + @Override + public Iterable<Edge> getEdges(final String prop, final Object val) { + + final URI p = factory.toPropertyURI(prop); + final Literal o = factory.toLiteral(val); + + try { + + final StringBuilder sb = new StringBuilder(); + sb.append("construct { ?from ?edge ?to . } where {\n"); + sb.append(" ?edge <"+p+"> "+o+" .\n"); + sb.append(" ?from ?edge ?to .\n"); + sb.append("}"); + + final String queryStr = sb.toString(); + + return getEdges(queryStr); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Lookup a vertex. + */ + @Override + public Vertex getVertex(final Object key) { + + if (key == null) + throw new IllegalArgumentException(); + + final URI uri = factory.toVertexURI(key.toString()); + + try { + + if (cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { + return new BigdataVertex(uri, this); + } + + return null; + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + + /** + * Iterate all vertices. + */ + @Override + public Iterable<Vertex> getVertices() { + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(null, RDF.TYPE, VERTEX, false); + + return new VertexIterable(result, true); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Find vertices with the supplied property value. + */ + @Override + public Iterable<Vertex> getVertices(final String prop, final Object val) { + + final URI p = factory.toPropertyURI(prop); + final Literal o = factory.toLiteral(val); + + try { + + final RepositoryResult<Statement> result = + cxn().getStatements(null, p, o, false); + + return new VertexIterable(result, true); + + } catch (Exception ex) { + throw new RuntimeException(ex); + } + + } + + /** + * Providing an override implementation for our GraphQuery to avoid the + * low-performance scan and filter paradigm. See {@link BigdataGraphQuery}. + */ + @Override + public GraphQuery query() { +// return new DefaultGraphQuery(this); + return new BigdataGraphQuery(this); + } + + /** + * Remove an edge and its properties. + */ + @Override + public void removeEdge(final Edge edge) { + + try { + + final URI uri = factory.toURI(edge); + + if (!cxn().hasStatement(uri, RDF.TYPE, EDGE, false)) { + throw new IllegalStateException(); + } + + final URI wild = null; + + // remove the edge statement + cxn().remove(wild, uri, wild); + + // remove its properties + cxn().remove(uri, wild, wild); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Remove a vertex and its edges and properties. + */ + @Override + public void removeVertex(final Vertex vertex) { + + try { + + final URI uri = factory.toURI(vertex); + + if (!cxn().hasStatement(uri, RDF.TYPE, VERTEX, false)) { + throw new IllegalStateException(); + } + + final URI wild = null; + + // remove outgoing edges and properties + cxn().remove(uri, wild, wild); + + // remove incoming edges + cxn().remove(wild, wild, uri); + + } catch (Exception e) { + throw new RuntimeException(e); + } + + } + + /** + * Translate a collection of Bigdata statements into an iteration of + * Blueprints vertices. + * + * @author mikepersonick + * + */ + public class VertexIterable implements Iterable<Vertex>, Iterator<Vertex> { + + private final CloseableIteration<Statement, ? extends OpenRDFException> stmts; + + private final boolean subject;... [truncated message content] |
From: <mrp...@us...> - 2014-05-12 19:06:46
|
Revision: 8288 http://sourceforge.net/p/bigdata/code/8288 Author: mrpersonick Date: 2014-05-12 19:06:43 +0000 (Mon, 12 May 2014) Log Message: ----------- pull in the avro classes - should be dirty Modified Paths: -------------- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java Modified: branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java =================================================================== --- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-12 18:39:08 UTC (rev 8287) +++ branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-12 19:06:43 UTC (rev 8288) @@ -50,7 +50,7 @@ // // pull a Tinkerpop interface into the class loader // log.info(com.tinkerpop.blueprints.Graph.class.getName()); -// log.info(org.apache.avro.Schema.class.getName()); + log.info(org.apache.avro.Schema.class.getName()); } /** This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <mrp...@us...> - 2014-05-12 18:39:11
|
Revision: 8287 http://sourceforge.net/p/bigdata/code/8287 Author: mrpersonick Date: 2014-05-12 18:39:08 +0000 (Mon, 12 May 2014) Log Message: ----------- pulling out the avro banner, should be clean? Modified Paths: -------------- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java Modified: branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java =================================================================== --- branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-12 18:06:17 UTC (rev 8286) +++ branches/BLUEPRINTS/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/RESTServlet.java 2014-05-12 18:39:08 UTC (rev 8287) @@ -50,7 +50,7 @@ // // pull a Tinkerpop interface into the class loader // log.info(com.tinkerpop.blueprints.Graph.class.getName()); - log.info(org.apache.avro.Schema.class.getName()); +// log.info(org.apache.avro.Schema.class.getName()); } /** This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-12 18:06:20
|
Revision: 8286 http://sourceforge.net/p/bigdata/code/8286 Author: dmekonnen Date: 2014-05-12 18:06:17 +0000 (Mon, 12 May 2014) Log Message: ----------- Updates to support NSS builds from SVN Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb Added Paths: ----------- branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdataNSS Removed Paths: ------------- branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdata Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-12 16:41:31 UTC (rev 8285) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-12 18:06:17 UTC (rev 8286) @@ -23,7 +23,7 @@ default['bigdata'][:data_dir] = node['bigdata'][:home] + "/var/data" if node['bigdata'][:build_from_svn] - default['bigdata'][:svn_branch] = "https://svn.code.sf.net/p/bigdata/code/branches/BIGDATA_RELEASE_1_3_0" + default['bigdata'][:svn_branch] = "https://svn.code.sf.net/p/bigdata/code/branches/DEPLOYMENT_BRANCH_1_3_1" end when "tomcat" default['tomcat'][:base_version] = 7 Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb 2014-05-12 16:41:31 UTC (rev 8285) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/nss.rb 2014-05-12 18:06:17 UTC (rev 8286) @@ -36,6 +36,15 @@ cwd "/home/ubuntu/#{node['bigdata'][:source]}" command "ant package-brew-nss" end + + execute "Extract and relocate the bigdata archive" do + cwd "/var/lib" + command "tar xvf /home/ubuntu/#{node['bigdata'][:source]}/REL-NSS.bigdata-1.*.tgz" + end + + link "/etc/init.d/bigdataNSS" do + to "#{node['bigdata'][:home]}/bin/bigdataNSS" + end else # # Retrieve the package prepared for Brew: @@ -51,6 +60,12 @@ command "tar xvf /tmp/bigdata.tgz" end + # + # The script name "bigdata" becomes "bigdataNSS" in the 1.3.1 release + # + link "/etc/init.d/bigdataNSS" do + to "#{node['bigdata'][:home]}/bin/bigdata" + end end @@ -61,10 +76,6 @@ command "chown -R #{node['bigdata'][:user]}:#{node['bigdata'][:group]} ." end - link "/etc/init.d/bigdataNSS" do - to "#{node['bigdata'][:home]}/bin/bigdata" - end - # # We shell out to make template substitutions # Deleted: branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdata =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdata 2014-05-12 16:41:31 UTC (rev 8285) +++ branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdata 2014-05-12 18:06:17 UTC (rev 8286) @@ -1,109 +0,0 @@ -#!/bin/bash - -# init.d style script for bigdata HA services. The script can be used -# to 'start' or 'stop' services. -# -# Environment: -# -# binDir - The directory containing the installed scripts. -# pidFile - The pid is written on this file. -# -# Misc. -# -# See http://tldp.org/LDP/abs/html/index.html -# -# Note: Blank lines are significant in shell scripts. -# -# Note: Children must do "exit 0" to indicate success. -# -# Note: Convert DOS cr-lf to unix style in emacs: C-x RET f then unix - -# Source function library (just used for 'action'). If you don't have this -# it SHOULD automatically use the inline definition for "action()". - -# -# the following template line will be replaced by a deployer application (e.g. brew, chef) -# -export INSTALL_TYPE="<%= INSTALL_TYPE %>" -export BD_HOME="<%= BD_HOME %>" -pidFile=${BD_HOME}/var/lock/pid -binDir=${BD_HOME}/bin - - -# -# See how we were called. -# -case "$1" in - start) -# -# Start the ServiceStarter and child services if not running. -# - if [ -f "$pidFile" ]; then - read pid < "$pidFile" - pidno=$( ps ax | grep $pid | awk '{ print $1 }' | grep $pid ) - if [ -z "$pidno" ]; then -# The process has died so remove the old pid file. - echo $"`date` : `hostname` : $pid died?" - rm -f "$pidFile" - fi - fi - if [ ! -f "$pidFile" ]; then - echo -ne $"`date` : `hostname` : bringing bigdata services up ... " - $binDir/startNSS - echo "done!" - else - echo $"`date` : `hostname` : running as $pid" - fi - ;; - stop) -# -# Stop the ServiceStarter and all child services. -# - if [ -f "$pidFile" ]; then - read pid < "$pidFile" - pidno=$( ps ax | grep $pid | awk '{ print $1 }' | grep $pid ) - if [ -z "$pidno" ]; then -# The process has died so remove the old pid file. - echo $"`date` : `hostname` : $pid died?" - rm -f "$pidFile" - else - echo -ne $"`date` : `hostname` : bringing bigdata service down ... " - kill $pid - rm -f "$pidFile" - echo "done!" - fi - fi - ;; - status) -# -# Report status for the ServicesManager (up or down). -# - if [ -f "$pidFile" ]; then - read pid < "$pidFile" - pidno=$( ps ax | grep $pid | awk '{ print $1 }' | grep $pid ) - if [ -z "$pidno" ]; then - echo $"`date` : `hostname` : process died? pid=$pid." - else - echo $"`date` : `hostname` : running as $pid." - fi - else - echo $"`date` : `hostname` : not running." - fi - ;; -# -# Simply stop then start. -# - restart) - $0 stop - $0 start - ;; - *) -# -# Usage -# - me=`basename $0` - echo $"Usage: $0 {start|stop|status|restart}" - exit 1 -esac - -exit 0 Copied: branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdataNSS (from rev 8207, branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdata) =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdataNSS (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/src/resources/deployment/nss/bin/bigdataNSS 2014-05-12 18:06:17 UTC (rev 8286) @@ -0,0 +1,109 @@ +#!/bin/bash + +# init.d style script for bigdata HA services. The script can be used +# to 'start' or 'stop' services. +# +# Environment: +# +# binDir - The directory containing the installed scripts. +# pidFile - The pid is written on this file. +# +# Misc. +# +# See http://tldp.org/LDP/abs/html/index.html +# +# Note: Blank lines are significant in shell scripts. +# +# Note: Children must do "exit 0" to indicate success. +# +# Note: Convert DOS cr-lf to unix style in emacs: C-x RET f then unix + +# Source function library (just used for 'action'). If you don't have this +# it SHOULD automatically use the inline definition for "action()". + +# +# the following template line will be replaced by a deployer application (e.g. brew, chef) +# +export INSTALL_TYPE="<%= INSTALL_TYPE %>" +export BD_HOME="<%= BD_HOME %>" +pidFile=${BD_HOME}/var/lock/pid +binDir=${BD_HOME}/bin + + +# +# See how we were called. +# +case "$1" in + start) +# +# Start the ServiceStarter and child services if not running. +# + if [ -f "$pidFile" ]; then + read pid < "$pidFile" + pidno=$( ps ax | grep $pid | awk '{ print $1 }' | grep $pid ) + if [ -z "$pidno" ]; then +# The process has died so remove the old pid file. + echo $"`date` : `hostname` : $pid died?" + rm -f "$pidFile" + fi + fi + if [ ! -f "$pidFile" ]; then + echo -ne $"`date` : `hostname` : bringing bigdata services up ... " + $binDir/startNSS + echo "done!" + else + echo $"`date` : `hostname` : running as $pid" + fi + ;; + stop) +# +# Stop the ServiceStarter and all child services. +# + if [ -f "$pidFile" ]; then + read pid < "$pidFile" + pidno=$( ps ax | grep $pid | awk '{ print $1 }' | grep $pid ) + if [ -z "$pidno" ]; then +# The process has died so remove the old pid file. + echo $"`date` : `hostname` : $pid died?" + rm -f "$pidFile" + else + echo -ne $"`date` : `hostname` : bringing bigdata service down ... " + kill $pid + rm -f "$pidFile" + echo "done!" + fi + fi + ;; + status) +# +# Report status for the ServicesManager (up or down). +# + if [ -f "$pidFile" ]; then + read pid < "$pidFile" + pidno=$( ps ax | grep $pid | awk '{ print $1 }' | grep $pid ) + if [ -z "$pidno" ]; then + echo $"`date` : `hostname` : process died? pid=$pid." + else + echo $"`date` : `hostname` : running as $pid." + fi + else + echo $"`date` : `hostname` : not running." + fi + ;; +# +# Simply stop then start. +# + restart) + $0 stop + $0 start + ;; + *) +# +# Usage +# + me=`basename $0` + echo $"Usage: $0 {start|stop|status|restart}" + exit 1 +esac + +exit 0 This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dme...@us...> - 2014-05-12 16:41:35
|
Revision: 8285 http://sourceforge.net/p/bigdata/code/8285 Author: dmekonnen Date: 2014-05-12 16:41:31 +0000 (Mon, 12 May 2014) Log Message: ----------- update to build bigdata from svn. addition of mapgraph recipe. Modified Paths: -------------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb Added Paths: ----------- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/mapgraph.rb Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-12 15:35:32 UTC (rev 8284) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/attributes/default.rb 2014-05-12 16:41:31 UTC (rev 8285) @@ -5,7 +5,7 @@ default['bigdata'][:user] = "bigdata" default['bigdata'][:group] = "bigdata" -default['bigdata'][:properties] = default['bigdata'][:home] + "RWStore.properties" +default['bigdata'][:properties] = default['bigdata'][:home] + "/RWStore.properties" default['bigdata'][:source] = "bigdata-code" @@ -83,3 +83,6 @@ default['bigdata']['namespace.kb.spo.com.bigdata.btree.BTree.branchingFactor'] = "1024" default['bigdata']['rdf.sail.bufferCapacity'] = "100000" # default['bigdata']['rdf.store.AbstractTripleStore.vocabularyClass'] = "" + +default['mapgraph'][:source] = "mapgraph-code" +default['mapgraph'][:svn_branch] = "https://svn.code.sf.net/p/mpgraph/code/trunk" Added: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/mapgraph.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/mapgraph.rb (rev 0) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/mapgraph.rb 2014-05-12 16:41:31 UTC (rev 8285) @@ -0,0 +1,33 @@ +# +# Cookbook Name:: systap-bigdata +# Recipe:: default +# +# Copyright 2013, Systap +# +# +execute "pull mapgraph from svn repo" do + user 'ubuntu' + group 'ubuntu' + cwd "/home/ubuntu" + command "svn checkout #{default['mapgraph'][:svn_branch]} #{node['mapgraph'][:source]}" +end + +execute "make mapgraph" do + cwd node['mapgraph'][:source] + command "make" +end + +execute "test mapgraph" do + cwd node['mapgraph'][:source] + command "./Algorithms/SSSP/SSSP -g smallRegressionGraphs/small.mtx" +end + + +# +# "recursive true" did not work here +# +# directory node['bigdata'][:mapgraph_home] do +# owner 'ec2-user' +# group 'ec2-user' +# recursive true +# end Modified: branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb =================================================================== --- branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb 2014-05-12 15:35:32 UTC (rev 8284) +++ branches/DEPLOYMENT_BRANCH_1_3_1/bigdata/src/resources/deployment/vagrant/systap-bigdata/recipes/tomcat.rb 2014-05-12 16:41:31 UTC (rev 8285) @@ -106,29 +106,53 @@ owner node['tomcat'][:user] group node['tomcat'][:group] mode 00644 - retry_delay 10 + retry_delay 15 retries 3 end # + # Delete all log files so that the error and warning messages that appeared during the installation + # process do not unnecessarily alarm anyone. + # + execute "remove log files before retart" do + cwd "#{node['tomcat'][:log_dir]}" + command "rm *" + end + + + # # The RWStore.properties path is the only property that needs to be adjusted in the web.xml file. # Using a sed command to adjust the property avoids the need to maintain a web.xml template which # in turn updates frequently relative to the other property files. Thus this recipe becomes # suitable against a larger range of bigdata releases. # - execute "set absolute path for RWStore.properties" do - cwd "#{node['bigdata'][:web_home]}/WEB-INF" - command "sed -i 's|<param-value>../webapps/bigdata/RWStore.properties|<param-value>#{node['bigdata'][:home]}/RWStore.properties|' web.xml" - end + if node['bigdata'][:build_from_svn] + execute "set absolute path for RWStore.properties" do + cwd "#{node['bigdata'][:web_home]}/WEB-INF" + command "sed -i 's|<param-value>../webapps/bigdata/WEB-INF/RWStore.properties|<param-value>#{node['bigdata'][:home]}/RWStore.properties|' web.xml" + end + # + # Remove original RWStore.properties file to avoid user confusion + # + file "#{node['bigdata'][:web_home]}/WEB-INF/RWStore.properties" do + action :delete + end + else + # + # 1.3.0 uses a different path for RWStore.properties. We can remove this if block in 1.3.1 + # + execute "set absolute path for RWStore.properties" do + cwd "#{node['bigdata'][:web_home]}/WEB-INF" + command "sed -i 's|<param-value>../webapps/bigdata/RWStore.properties|<param-value>#{node['bigdata'][:home]}/RWStore.properties|' web.xml" + end - # - # Delete all log files so that the error and warning messages that appeared during the installation - # process do not unnecessarily alarm anyone. - # - execute "remove log files before retart" do - cwd "#{node['tomcat'][:log_dir]}" - command "rm *" + # + # Remove original RWStore.properties file to avoid user confusion + # + file "#{node['bigdata'][:web_home]}/RWStore.properties" do + action :delete + end end end This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |