|
From: <tho...@us...> - 2014-04-15 13:01:37
|
Revision: 8121
http://sourceforge.net/p/bigdata/code/8121
Author: thompsonbry
Date: 2014-04-15 13:01:24 +0000 (Tue, 15 Apr 2014)
Log Message:
-----------
Caught up the HA1/HA5 branch with changes in the main development branch prior to bringing back the HA1/HA5 branch to the main development branch.
See #722 (HA1)
See #723 (HA5)
Modified Paths:
--------------
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/ProcessReaderHelper.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/win/TypeperfCollector.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/resources/AsynchronousOverflowTask.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/FullTextIndex.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/ReadIndexTask.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/ndx/pipeline/AbstractSubtask.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/proxy/ClientAsynchronousIterator.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/util/CSVReader.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/cache/StressTestGlobalLRU.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/service/ndx/pipeline/TestMasterTaskWithRedirect.java
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/service/ndx/pipeline/TestMasterTaskWithSplits.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/java/com/bigdata/journal/jini/ha/HARestore.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/java/com/bigdata/journal/jini/ha/SnapshotManager.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestAll.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3JournalServer.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/ASTSearchOptimizer.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/eval/SearchServiceFactory.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/sparql/ast/optimizers/ASTBindingAssigner.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/store/BDS.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/TestNamedGraphs.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/TestUnions.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/MultiTenancyServlet.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/client/RemoteRepository.java
branches/BIGDATA_MGC_HA1_HA5/build.xml
branches/BIGDATA_MGC_HA1_HA5/src/resources/HAJournal/HAJournal.config
branches/BIGDATA_MGC_HA1_HA5/src/resources/bin/startHAServices
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/init.d/bigdataHA
Added Paths:
-----------
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Berksfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/CHANGELOG.txt
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Gemfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/README.txt
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Thorfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Vagrantfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/aws.rc
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/createCluster.sh
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/createSecurityGroup.py
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/setHosts.py
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/chefignore
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/default_test.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/metadata.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/java7.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/ssd.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/bigdataHA.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/init.d/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/init.d/bigdataHA.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/jetty.xml.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/log4jHA.properties.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/zoo.cfg.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/test/com/bigdata/journal/jini/ha/TestHA3CancelQuery.java
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/named-graphs-ticket-888.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/named-graphs-ticket-888.srx
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/named-graphs-ticket-888.trig
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/named-graphs-ticket-888b.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_831.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_831.srx
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_831.ttl
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_874.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_874.srx
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_874.ttl
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/sparql/ast/eval/ticket_874b.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/test/com/bigdata/rdf/sail/831.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/test/com/bigdata/rdf/sail/831.ttl
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/test/com/bigdata/rdf/sail/874.rq
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/test/com/bigdata/rdf/sail/874.ttl
branches/BIGDATA_MGC_HA1_HA5/src/resources/bin/HARestore
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/default/
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/default/bigdata/
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/default/bigdataHA
Removed Paths:
-------------
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Berksfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/CHANGELOG.txt
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Gemfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/README.txt
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Thorfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/Vagrantfile
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/attributes/default.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/aws.rc
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/createCluster.sh
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/createSecurityGroup.py
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/bin/setHosts.py
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/chefignore
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/files/default/test/default_test.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/metadata.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/default.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/java7.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/recipes/ssd.rb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/default/bigdataHA.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/init.d/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/init.d/bigdataHA.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/jetty.xml.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/log4jHA.properties.erb
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/resources/deployment/vagrant/systap-aws-bigdata-ha/templates/default/zoo.cfg.erb
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/bigdata/
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/default/bigdata/
branches/BIGDATA_MGC_HA1_HA5/src/resources/etc/default/bigdataHA
Property Changed:
----------------
branches/BIGDATA_MGC_HA1_HA5/
branches/BIGDATA_MGC_HA1_HA5/bigdata/lib/jetty/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/aggregate/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/joinGraph/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/util/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/htree/raba/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/jsr166/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/bop/joinGraph/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/bop/util/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/jsr166/
branches/BIGDATA_MGC_HA1_HA5/bigdata/src/test/com/bigdata/util/httpd/
branches/BIGDATA_MGC_HA1_HA5/bigdata-compatibility/
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/java/com/bigdata/attr/
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/java/com/bigdata/disco/
branches/BIGDATA_MGC_HA1_HA5/bigdata-jini/src/java/com/bigdata/util/config/
branches/BIGDATA_MGC_HA1_HA5/bigdata-perf/
branches/BIGDATA_MGC_HA1_HA5/bigdata-perf/btc/
branches/BIGDATA_MGC_HA1_HA5/bigdata-perf/btc/src/resources/
branches/BIGDATA_MGC_HA1_HA5/bigdata-perf/lubm/
branches/BIGDATA_MGC_HA1_HA5/bigdata-perf/uniprot/
branches/BIGDATA_MGC_HA1_HA5/bigdata-perf/uniprot/src/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/bop/rdf/aggregate/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/changesets/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/error/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/internal/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/relation/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/java/com/bigdata/rdf/util/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/samples/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/bop/rdf/aggregate/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/internal/
branches/BIGDATA_MGC_HA1_HA5/bigdata-rdf/src/test/com/bigdata/rdf/relation/
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/java/com/bigdata/rdf/sail/bench/
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/java/com/bigdata/rdf/sail/changesets/
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/java/com/bigdata/rdf/sail/webapp/
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/test/com/bigdata/rdf/sail/bench/
branches/BIGDATA_MGC_HA1_HA5/bigdata-sails/src/test/com/bigdata/rdf/sail/webapp/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/LEGAL/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/lib/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/java/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/java/it/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/java/it/unimi/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/test/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/test/it/unimi/
branches/BIGDATA_MGC_HA1_HA5/dsi-utils/src/test/it/unimi/dsi/
branches/BIGDATA_MGC_HA1_HA5/lgpl-utils/src/java/it/unimi/dsi/fastutil/bytes/custom/
branches/BIGDATA_MGC_HA1_HA5/lgpl-utils/src/test/it/unimi/dsi/fastutil/bytes/custom/
branches/BIGDATA_MGC_HA1_HA5/osgi/
branches/BIGDATA_MGC_HA1_HA5/src/resources/bin/config/
Index: branches/BIGDATA_MGC_HA1_HA5
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE:6769-6785
/branches/BIGDATA_RELEASE_1_2_0:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0:8025-8120
/branches/BTREE_BUFFER_BRANCH:2004-2045
/branches/DEV_BRANCH_27_OCT_2009:2270-2546,2548-2782
/branches/INT64_BRANCH:4486-4522
\ No newline at end of property
Index: branches/BIGDATA_MGC_HA1_HA5/bigdata/lib/jetty
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/lib/jetty 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/lib/jetty 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5/bigdata/lib/jetty
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE/bigdata/lib/jetty:6769-6785
/branches/BIGDATA_RELEASE_1_2_0/bigdata/lib/jetty:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0/bigdata/lib/jetty:8025-8120
/branches/INT64_BRANCH/bigdata/lib/jetty:4486-4522
/branches/MGC_1_3_0/bigdata/lib/jetty:7609-7752
/branches/QUADS_QUERY_BRANCH/bigdata/lib/jetty:4525-4531,4533-4548,4550-4584,4586-4609,4611-4632,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801
\ No newline at end of property
Index: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/aggregate
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/aggregate 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/aggregate 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/aggregate
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE/bigdata/src/java/com/bigdata/bop/aggregate:6769-6785
/branches/BIGDATA_RELEASE_1_2_0/bigdata/src/java/com/bigdata/bop/aggregate:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/bop/aggregate:8025-8120
/branches/INT64_BRANCH/bigdata/src/java/com/bigdata/bop/aggregate:4486-4522
/branches/MGC_1_3_0/bigdata/src/java/com/bigdata/bop/aggregate:7609-7752
/branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/aggregate:4525-4531,4533-4548,4550-4584,4586-4609,4611-4632,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801
\ No newline at end of property
Index: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/joinGraph
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/joinGraph 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/joinGraph 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/joinGraph
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE/bigdata/src/java/com/bigdata/bop/joinGraph:6769-6785
/branches/BIGDATA_RELEASE_1_2_0/bigdata/src/java/com/bigdata/bop/joinGraph:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/bop/joinGraph:8025-8120
/branches/INT64_BRANCH/bigdata/src/java/com/bigdata/bop/joinGraph:4486-4522
/branches/MGC_1_3_0/bigdata/src/java/com/bigdata/bop/joinGraph:7609-7752
/branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/joinGraph:4525-4531,4533-4548,4550-4584,4586-4609,4611-4632,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801
\ No newline at end of property
Index: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/util
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/util 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/util 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/bop/util
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE/bigdata/src/java/com/bigdata/bop/util:6769-6785
/branches/BIGDATA_RELEASE_1_2_0/bigdata/src/java/com/bigdata/bop/util:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/bop/util:8025-8120
/branches/INT64_BRANCH/bigdata/src/java/com/bigdata/bop/util:4486-4522
/branches/MGC_1_3_0/bigdata/src/java/com/bigdata/bop/util:7609-7752
/branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/bop/util:4525-4531,4533-4548,4550-4584,4586-4609,4611-4632,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801
\ No newline at end of property
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/ProcessReaderHelper.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/ProcessReaderHelper.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/ProcessReaderHelper.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -84,11 +84,11 @@
*/
public String readLine() throws IOException, InterruptedException {
- final Thread t = Thread.currentThread();
+// final Thread t = Thread.currentThread();
while(getActiveProcess().isAlive()) {
- if(t.isInterrupted()) {
+ if(Thread.interrupted()) {
throw new InterruptedException();
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/win/TypeperfCollector.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/win/TypeperfCollector.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/counters/win/TypeperfCollector.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -31,6 +31,7 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
+import java.nio.channels.ClosedByInterruptException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
@@ -39,6 +40,7 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.CancellationException;
import org.apache.log4j.Logger;
@@ -52,6 +54,7 @@
import com.bigdata.counters.IRequiredHostCounters;
import com.bigdata.util.CSVReader;
import com.bigdata.util.CSVReader.Header;
+import com.bigdata.util.InnerCause;
/**
* Collects per-host performance counters on a Windows platform using
@@ -68,19 +71,19 @@
*/
public class TypeperfCollector extends AbstractProcessCollector {
- static protected final Logger log = Logger.getLogger(TypeperfCollector.class);
+ static private final Logger log = Logger.getLogger(TypeperfCollector.class);
- /**
- * True iff the {@link #log} level is INFO or less.
- */
- final protected static boolean INFO = log.isInfoEnabled();
+// /**
+// * True iff the {@link #log} level is INFO or less.
+// */
+// final protected static boolean INFO = log.isInfoEnabled();
+//
+// /**
+// * True iff the {@link #log} level is DEBUG or less.
+// */
+// final protected static boolean DEBUG = log.isDebugEnabled();
/**
- * True iff the {@link #log} level is DEBUG or less.
- */
- final protected static boolean DEBUG = log.isDebugEnabled();
-
- /**
* Updated each time a new row of data is read from the process and reported
* as the last modified time for counters based on that process and
* defaulted to the time that we begin to collect performance data.
@@ -175,6 +178,7 @@
}
+ @Override
public Double getValue() {
final Double value = (Double) vals.get(path);
@@ -189,6 +193,7 @@
}
+ @Override
public long lastModified() {
return lastModified;
@@ -199,7 +204,8 @@
* @throws UnsupportedOperationException
* always.
*/
- public void setValue(Double value, long timestamp) {
+ @Override
+ public void setValue(final Double value, final long timestamp) {
throw new UnsupportedOperationException();
@@ -225,6 +231,7 @@
*
* @throws IOException
*/
+ @Override
public List<String> getCommand() {
// make sure that our counters have been declared.
@@ -243,7 +250,7 @@
// counter names need to be double quoted for the command line.
command.add("\"" + decl.getCounterNameForWindows() + "\"");
- if(INFO) log.info("Will collect: \""
+ if(log.isInfoEnabled()) log.info("Will collect: \""
+ decl.getCounterNameForWindows() + "\" as "
+ decl.getPath());
@@ -255,6 +262,7 @@
}
+ @Override
public AbstractProcessReader getProcessReader() {
return new ProcessReader();
@@ -290,9 +298,10 @@
}
+ @Override
public void run() {
- if(INFO)
+ if(log.isInfoEnabled())
log.info("");
try {
@@ -300,27 +309,34 @@
// run
read();
- } catch (InterruptedException e) {
+ } catch (Exception e) {
- // Note: This is a normal exit.
- if(INFO)
- log.info("Interrupted - will terminate");
+ if (InnerCause.isInnerCause(e, InterruptedException.class)||
+ InnerCause.isInnerCause(e, ClosedByInterruptException.class)||
+ InnerCause.isInnerCause(e, CancellationException.class)
+ ) {
- } catch (Exception e) {
+ // Note: This is a normal exit.
+ if (log.isInfoEnabled())
+ log.info("Interrupted - will terminate");
- // Unexpected error.
- log.fatal(e.getMessage(), e);
+ } else {
+ // Unexpected error.
+ log.fatal(e.getMessage(), e);
+
+ }
+
}
- if(INFO)
+ if(log.isInfoEnabled())
log.info("Terminated");
}
private void read() throws Exception {
- if(INFO)
+ if(log.isInfoEnabled())
log.info("");
long nsamples = 0;
@@ -345,33 +361,34 @@
*/
csvReader.setTailDelayMillis(100/* ms */);
- try {
+// try {
- // read headers from the file.
- csvReader.readHeaders();
+ // read headers from the file.
+ csvReader.readHeaders();
- } catch (IOException ex) {
+// } catch (IOException ex) {
+//
+// /*
+// * Note: An IOException thrown out here often indicates an
+// * asynchronous close of of the reader. A common and benign
+// * cause of that is closing the input stream because the service
+// * is shutting down.
+// */
+//
+// if (!Thread.interrupted())
+// throw ex;
+//
+// throw new InterruptedException();
+//
+// }
- /*
- * Note: An IOException thrown out here often indicates an
- * asynchronous close of of the reader. A common and benign
- * cause of that is closing the input stream because the service
- * is shutting down.
- */
-
- if (!Thread.currentThread().isInterrupted())
- throw ex;
-
- throw new InterruptedException();
-
- }
-
/*
* replace the first header definition so that we get clean
* timestamps.
*/
csvReader.setHeader(0, new Header("Timestamp") {
- public Object parseValue(String text) {
+ @Override
+ public Object parseValue(final String text) {
try {
return f.parse(text);
@@ -390,7 +407,7 @@
*/
{
- if(INFO)
+ if(log.isInfoEnabled())
log.info("setting up headers.");
int i = 1;
@@ -400,7 +417,7 @@
final String path = decl.getPath();
// String path = hostPathPrefix + decl.getPath();
- if (INFO)
+ if (log.isInfoEnabled())
log.info("setHeader[i=" + i + "]=" + path);
csvReader.setHeader(i++, new Header(path));
@@ -409,13 +426,20 @@
}
- if(INFO)
+ if(log.isInfoEnabled())
log.info("starting row reads");
- final Thread t = Thread.currentThread();
+// final Thread t = Thread.currentThread();
- while (!t.isInterrupted() && csvReader.hasNext()) {
+ while (true) {
+ if (Thread.interrupted())
+ throw new InterruptedException();
+
+ if (!csvReader.hasNext()) {
+ break;
+ }
+
try {
final Map<String, Object> row = csvReader.next();
@@ -455,7 +479,7 @@
}
- if(INFO)
+ if(log.isInfoEnabled())
log.info("done.");
}
@@ -466,6 +490,7 @@
* Declares the performance counters to be collected from the Windows
* platform.
*/
+ @Override
public CounterSet getCounters() {
// if (root == null) {
Index: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/htree/raba
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/htree/raba 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/htree/raba 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/htree/raba
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE/bigdata/src/java/com/bigdata/htree/raba:6769-6785
/branches/BIGDATA_RELEASE_1_2_0/bigdata/src/java/com/bigdata/htree/raba:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/htree/raba:8025-8120
/branches/INT64_BRANCH/bigdata/src/java/com/bigdata/htree/raba:4486-4522
/branches/MGC_1_3_0/bigdata/src/java/com/bigdata/htree/raba:7609-7752
/branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/htree/raba:4525-4531,4533-4548,4550-4584,4586-4609,4611-4632,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801
\ No newline at end of property
Index: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/jsr166
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/jsr166 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/jsr166 2014-04-15 13:01:24 UTC (rev 8121)
Property changes on: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/jsr166
___________________________________________________________________
Modified: svn:mergeinfo
## -1,5 +1,6 ##
/branches/BIGDATA_OPENRDF_2_6_9_UPDATE/bigdata/src/java/com/bigdata/jsr166:6769-6785
/branches/BIGDATA_RELEASE_1_2_0/bigdata/src/java/com/bigdata/jsr166:6766-7380
+/branches/BIGDATA_RELEASE_1_3_0/bigdata/src/java/com/bigdata/jsr166:8025-8120
/branches/INT64_BRANCH/bigdata/src/java/com/bigdata/jsr166:4486-4522
/branches/MGC_1_3_0/bigdata/src/java/com/bigdata/jsr166:7609-7752
/branches/QUADS_QUERY_BRANCH/bigdata/src/java/com/bigdata/jsr166:4525-4531,4533-4548,4550-4584,4586-4609,4611-4632,4634-4643,4646-4672,4674-4685,4687-4693,4697-4735,4737-4782,4784-4792,4794-4796,4798-4801
\ No newline at end of property
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/resources/AsynchronousOverflowTask.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/resources/AsynchronousOverflowTask.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/resources/AsynchronousOverflowTask.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -2848,7 +2848,7 @@
/**
* Note: This task is interrupted by {@link OverflowManager#shutdownNow()}.
- * Therefore is tests {@link Thread#isInterrupted()} and returns immediately
+ * Therefore it tests {@link Thread#isInterrupted()} and returns immediately
* if it has been interrupted.
*
* @return The return value is always null.
@@ -3374,7 +3374,10 @@
static protected boolean isNormalShutdown(
final ResourceManager resourceManager, final Throwable t) {
- if(Thread.currentThread().isInterrupted()) return true;
+ if (Thread.interrupted()) {
+ // Note: interrupt status of thread was cleared.
+ return true;
+ }
if (!resourceManager.isRunning()
|| !resourceManager.getConcurrencyManager()
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/FullTextIndex.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/FullTextIndex.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/FullTextIndex.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -955,35 +955,137 @@
}
+ /**
+ * Perform a range count on a full text query.
+ */
public int count(final FullTextQuery query) {
- final Hit[] a = _search(query);
+ if (cache.containsKey(query)) {
+
+ if (log.isInfoEnabled())
+ log.info("found hits in cache");
+
+ return cache.get(query).length;
+
+ } else {
+
+ if (log.isInfoEnabled())
+ log.info("did not find hits in cache");
+
+ }
+
+ // tokenize the query.
+ final TermFrequencyData<V> qdata = tokenize(query);
+
+ // No terms after stopword extraction
+ if (qdata == null) {
+
+ cache.put(query, new Hit[] {});
+
+ return 0;
+
+ }
+
+ /*
+ * We can run an optimized version of this (just a quick range count)
+ * but only if the caller does not care about exact match and has
+ * not specified a regex.
+ */
+ if (qdata.distinctTermCount() == 1 &&
+ !query.isMatchExact() && query.getMatchRegex() == null) {
+
+ final boolean prefixMatch = query.isPrefixMatch();
+
+ final Map.Entry<String, ITermMetadata> e = qdata.getSingletonEntry();
+
+ final String termText = e.getKey();
+
+ final ITermMetadata md = e.getValue();
+
+ final CountIndexTask<V> task1 = new CountIndexTask<V>(termText, 0, 1,
+ prefixMatch, md.getLocalTermWeight(), this);
+
+ return (int) task1.getRangeCount();
+
+ } else {
+
+ final Hit<V>[] a = _search(query);
+
+ return a.length;
+
+ }
- return a.length;
-
}
- public Hit<V>[] _search(final FullTextQuery q) {
+ protected TermFrequencyData<V> tokenize(final FullTextQuery query) {
- final String query = q.getQuery();
- final String languageCode = q.getLanguageCode();
- final boolean prefixMatch = q.isPrefixMatch();
- final double minCosine = q.getMinCosine();
- final double maxCosine = q.getMaxCosine();
- final int minRank = q.getMinRank();
- final int maxRank = q.getMaxRank();
- final boolean matchAllTerms = q.isMatchAllTerms();
- final boolean matchExact = q.isMatchExact();
- final String regex = q.getMatchRegex();
- long timeout = q.getTimeout();
- final TimeUnit unit = q.getTimeUnit();
+ final String q = query.getQuery();
+ final String languageCode = query.getLanguageCode();
+ final boolean prefixMatch = query.isPrefixMatch();
+ // tokenize the query.
+ final TermFrequencyData<V> qdata;
+ {
+
+ final TokenBuffer<V> buffer = new TokenBuffer<V>(1, this);
+
+ /*
+ * If we are using prefix match ('*' operator) then we don't want to
+ * filter stopwords from the search query.
+ */
+ final boolean filterStopwords = !prefixMatch;
+
+ index(buffer, //
+ null, // docId // was Long.MIN_VALUE
+ Integer.MIN_VALUE, // fieldId
+ languageCode,//
+ new StringReader(q), //
+ filterStopwords//
+ );
+
+ if (buffer.size() == 0) {
+
+ /*
+ * There were no terms after stopword extraction.
+ */
+
+ log.warn("No terms after stopword extraction: query=" + query);
+
+ return null;
+
+ }
+
+ qdata = buffer.get(0);
+
+ qdata.normalize();
+
+ }
+
+ return qdata;
+
+ }
+
+ public Hit<V>[] _search(final FullTextQuery query) {
+
+ final String queryStr = query.getQuery();
+ final String languageCode = query.getLanguageCode();
+ final boolean prefixMatch = query.isPrefixMatch();
+ final double minCosine = query.getMinCosine();
+ final double maxCosine = query.getMaxCosine();
+ final int minRank = query.getMinRank();
+ final int maxRank = query.getMaxRank();
+ final boolean matchAllTerms = query.isMatchAllTerms();
+ final boolean matchExact = query.isMatchExact();
+ final String regex = query.getMatchRegex();
+ long timeout = query.getTimeout();
+ final TimeUnit unit = query.getTimeUnit();
+
final long begin = System.currentTimeMillis();
// if (languageCode == null)
// throw new IllegalArgumentException();
- if (query == null)
+ if (queryStr == null)
throw new IllegalArgumentException();
if (minCosine < 0d || minCosine > 1d)
@@ -1002,7 +1104,7 @@
throw new IllegalArgumentException();
if (log.isInfoEnabled())
- log.info("languageCode=[" + languageCode + "], text=[" + query
+ log.info("languageCode=[" + languageCode + "], text=[" + queryStr
+ "], minCosine=" + minCosine
+ ", maxCosine=" + maxCosine
+ ", minRank=" + minRank
@@ -1018,7 +1120,7 @@
}
- final FullTextQuery cacheKey = q;
+ final FullTextQuery cacheKey = query;
Hit<V>[] a;
@@ -1034,145 +1136,24 @@
if (log.isInfoEnabled())
log.info("did not find hits in cache");
- // tokenize the query.
- final TermFrequencyData<V> qdata;
- {
-
- final TokenBuffer<V> buffer = new TokenBuffer<V>(1, this);
-
- /*
- * If we are using prefix match ('*' operator) then we don't want to
- * filter stopwords from the search query.
- */
- final boolean filterStopwords = !prefixMatch;
-
- index(buffer, //
- null, // docId // was Long.MIN_VALUE
- Integer.MIN_VALUE, // fieldId
- languageCode,//
- new StringReader(query), //
- filterStopwords//
- );
-
- if (buffer.size() == 0) {
-
- /*
- * There were no terms after stopword extration.
- */
-
- log.warn("No terms after stopword extraction: query=" + query);
-
- a = new Hit[] {};
-
- cache.put(cacheKey, a);
-
- return a;
-
- }
-
- qdata = buffer.get(0);
-
- qdata.normalize();
-
- }
-
- final IHitCollector<V> hits;
-
- if (qdata.distinctTermCount() == 1) {
-
- final Map.Entry<String, ITermMetadata> e = qdata.getSingletonEntry();
-
- final String termText = e.getKey();
+ // tokenize the query.
+ final TermFrequencyData<V> qdata = tokenize(query);
+
+ // No terms after stopword extraction
+ if (qdata == null) {
- final ITermMetadata md = e.getValue();
-
- final CountIndexTask<V> task1 = new CountIndexTask<V>(termText, 0, 1, prefixMatch, md
- .getLocalTermWeight(), this);
-
- hits = new SingleTokenHitCollector<V>(task1);
-
- } else {
-
- final List<CountIndexTask<V>> tasks = new ArrayList<CountIndexTask<V>>(
- qdata.distinctTermCount());
-
- int i = 0;
- for (Map.Entry<String, ITermMetadata> e : qdata.terms.entrySet()) {
-
- final String termText = e.getKey();
-
- final ITermMetadata md = e.getValue();
-
- tasks.add(new CountIndexTask<V>(termText, i++, qdata.terms.size(), prefixMatch, md
- .getLocalTermWeight(), this));
-
- }
-
- hits = new MultiTokenHitCollector<V>(tasks);
-
- }
-
- // run the queries.
- {
-
- final List<Callable<Object>> tasks = new ArrayList<Callable<Object>>(
- qdata.distinctTermCount());
-
- int i = 0;
- for (Map.Entry<String, ITermMetadata> e : qdata.terms.entrySet()) {
-
- final String termText = e.getKey();
-
- final ITermMetadata md = e.getValue();
-
- tasks.add(new ReadIndexTask<V>(termText, i++, qdata.terms.size(),
- prefixMatch, md.getLocalTermWeight(), this, hits));
-
- }
-
- final ExecutionHelper<Object> executionHelper = new ExecutionHelper<Object>(
- getExecutorService(), timeout, unit);
-
- try {
-
- final long start = System.currentTimeMillis();
-
- executionHelper.submitTasks(tasks);
-
- if (log.isInfoEnabled()) {
- final long readTime = System.currentTimeMillis() - start;
- log.info("read time: " + readTime);
- }
-
- } catch (InterruptedException ex) {
-
- if (log.isInfoEnabled()) {
- // TODO Should we wrap and toss this interrupt instead?
- log.info("Interrupted - only partial results will be returned.");
- }
-
- /*
- * Yes, let's toss it. We were getting into a situation
- * where the ExecutionHelper above received an interrupt
- * but we still went through the heavy-weight filtering
- * operations below (matchExact or matchRegex).
- */
- throw new RuntimeException(ex);
-
- } catch (ExecutionException ex) {
-
- throw new RuntimeException(ex);
-
- }
-
- }
-
- a = hits.getHits();
-
+ cache.put(cacheKey, a = new Hit[] {});
+
+ return a;
+
+ }
+
+ a = executeQuery(qdata, prefixMatch, timeout, unit);
+
if (a.length == 0) {
log.info("No hits: languageCode=[" + languageCode + "], query=["
- + query + "]");
+ + queryStr + "]");
cache.put(cacheKey, a);
@@ -1223,14 +1204,14 @@
*/
if (matchExact) {
- a = matchExact(a, query);
+ a = matchExact(a, queryStr);
}
if (a.length == 0) {
log.warn("No hits after matchAllTerms pruning: languageCode=[" + languageCode + "], query=["
- + query + "]");
+ + queryStr + "]");
cache.put(cacheKey, a);
@@ -1260,7 +1241,7 @@
if (a.length == 0) {
log.warn("No hits after regex pruning: languageCode=[" + languageCode + "], query=["
- + query + "], regex=[" + regex + "]");
+ + queryStr + "], regex=[" + regex + "]");
cache.put(cacheKey, a);
@@ -1299,6 +1280,27 @@
}
+ /*
+ * Take a slice of the hits based on min/max cosine and min/max rank.
+ */
+ a = slice(query, a);
+
+ final long elapsed = System.currentTimeMillis() - begin;
+
+ if (log.isInfoEnabled())
+ log.info("Done: " + a.length + " hits in " + elapsed + "ms");
+
+ return a;
+
+ }
+
+ protected Hit<V>[] slice(final FullTextQuery query, Hit<V>[] a) {
+
+ final double minCosine = query.getMinCosine();
+ final double maxCosine = query.getMaxCosine();
+ final int minRank = query.getMinRank();
+ final int maxRank = query.getMaxRank();
+
// if (log.isDebugEnabled()) {
// log.debug("before min/max cosine/rank pruning:");
// for (Hit<V> h : a)
@@ -1422,13 +1424,106 @@
}
- final long elapsed = System.currentTimeMillis() - begin;
+ return a;
- if (log.isInfoEnabled())
- log.info("Done: " + a.length + " hits in " + elapsed + "ms");
+ }
+
+ protected Hit<V>[] executeQuery(final TermFrequencyData<V> qdata,
+ final boolean prefixMatch, final long timeout, final TimeUnit unit) {
+
+ final IHitCollector<V> hits;
+
+ if (qdata.distinctTermCount() == 1) {
+
+ final Map.Entry<String, ITermMetadata> e = qdata.getSingletonEntry();
+
+ final String termText = e.getKey();
+
+ final ITermMetadata md = e.getValue();
- return a;
+ final CountIndexTask<V> task1 = new CountIndexTask<V>(termText, 0, 1,
+ prefixMatch, md.getLocalTermWeight(), this);
+
+ hits = new SingleTokenHitCollector<V>(task1);
+
+ } else {
+
+ final List<CountIndexTask<V>> tasks = new ArrayList<CountIndexTask<V>>(
+ qdata.distinctTermCount());
+
+ int i = 0;
+ for (Map.Entry<String, ITermMetadata> e : qdata.terms.entrySet()) {
+
+ final String termText = e.getKey();
+
+ final ITermMetadata md = e.getValue();
+
+ tasks.add(new CountIndexTask<V>(termText, i++, qdata.terms.size(),
+ prefixMatch, md.getLocalTermWeight(), this));
+
+ }
+
+ hits = new MultiTokenHitCollector<V>(tasks);
+
+ }
+ // run the queries.
+ {
+
+ final List<Callable<Object>> tasks = new ArrayList<Callable<Object>>(
+ qdata.distinctTermCount());
+
+ int i = 0;
+ for (Map.Entry<String, ITermMetadata> e : qdata.terms.entrySet()) {
+
+ final String termText = e.getKey();
+
+ final ITermMetadata md = e.getValue();
+
+ tasks.add(new ReadIndexTask<V>(termText, i++, qdata.terms.size(),
+ prefixMatch, md.getLocalTermWeight(), this, hits));
+
+ }
+
+ final ExecutionHelper<Object> executionHelper = new ExecutionHelper<Object>(
+ getExecutorService(), timeout, unit);
+
+ try {
+
+ final long start = System.currentTimeMillis();
+
+ executionHelper.submitTasks(tasks);
+
+ if (log.isInfoEnabled()) {
+ final long readTime = System.currentTimeMillis() - start;
+ log.info("read time: " + readTime);
+ }
+
+ } catch (InterruptedException ex) {
+
+ if (log.isInfoEnabled()) {
+ // TODO Should we wrap and toss this interrupt instead?
+ log.info("Interrupted - only partial results will be returned.");
+ }
+
+ /*
+ * Yes, let's toss it. We were getting into a situation
+ * where the ExecutionHelper above received an interrupt
+ * but we still went through the heavy-weight filtering
+ * operations below (matchExact or matchRegex).
+ */
+ throw new RuntimeException(ex);
+
+ } catch (ExecutionException ex) {
+
+ throw new RuntimeException(ex);
+
+ }
+
+ }
+
+ return hits.getHits();
+
}
/**
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/ReadIndexTask.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/ReadIndexTask.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/search/ReadIndexTask.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -10,8 +10,6 @@
import com.bigdata.btree.ISimpleSplitHandler;
import com.bigdata.btree.ITuple;
import com.bigdata.btree.ITupleIterator;
-import com.bigdata.btree.keys.IKeyBuilder;
-import com.bigdata.btree.keys.SuccessorUtil;
/**
* Procedure reads on the terms index, aggregating data on a per-{@link Hit}
@@ -131,12 +129,12 @@
log.debug("queryTerm=" + queryTerm + ", termWeight="
+ queryTermWeight);
- final Thread t = Thread.currentThread();
+// final Thread t = Thread.currentThread();
while (itr.hasNext()) {
// don't test for interrupted on each result -- too much work.
- if (nhits % 1000 == 0 && t.isInterrupted()) {
+ if (nhits % 1000 == 0 && Thread.interrupted()) {
// if (log.isInfoEnabled())
log.warn("Interrupted: queryTerm=" + queryTerm + ", nhits="
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/ndx/pipeline/AbstractSubtask.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/ndx/pipeline/AbstractSubtask.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/ndx/pipeline/AbstractSubtask.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -337,8 +337,8 @@
public boolean hasNext() throws InterruptedException {
- // The thread in which this method runs.
- final Thread t = Thread.currentThread();
+// // The thread in which this method runs.
+// final Thread t = Thread.currentThread();
// when we start looking for a chunk.
final long begin = System.nanoTime();
@@ -349,7 +349,7 @@
master.halted();
// interrupted?
- if (t.isInterrupted()) {
+ if (Thread.interrupted()) {
throw master.halt(new InterruptedException(toString()));
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/proxy/ClientAsynchronousIterator.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/proxy/ClientAsynchronousIterator.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/service/proxy/ClientAsynchronousIterator.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -268,10 +268,13 @@
*/
private class ReaderTask implements Callable<Void> {
+ @Override
public Void call() throws Exception {
- final Thread t = Thread.currentThread();
+// final Thread t = Thread.currentThread();
+ boolean interrupted = false;
+
try {
/*
@@ -299,10 +302,11 @@
if (trace)
System.err.print('~');
- if (t.isInterrupted()) {
+ if (Thread.interrupted()) {
// thread interrupted, so we are done.
- break;
+ interrupted = true;
+ break; // break out of while(true)
}
@@ -344,10 +348,11 @@
*/
// don't call blocking method next() if we were interrupted.
- if (t.isInterrupted()) {
+ if (Thread.interrupted()) {
// thread interrupted, so we are done.
- break;
+ interrupted = true;
+ break; // break out of while(true)
}
@@ -392,7 +397,7 @@
}
if (INFO)
- log.info("Reader is done.");
+ log.info("Reader is done: interrupted" + interrupted);
return null;
@@ -448,7 +453,8 @@
}
}
-
+
+ @Override
public void close() {
if (future == null) {
Modified: branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/util/CSVReader.java
===================================================================
--- branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/util/CSVReader.java 2014-04-15 12:51:02 UTC (rev 8120)
+++ branches/BIGDATA_MGC_HA1_HA5/bigdata/src/java/com/bigdata/util/CSVReader.java 2014-04-15 13:01:24 UTC (rev 8121)
@@ -71,9 +71,9 @@
*/
public class CSVReader implements Iterator<Map<String, Object>> {
- protected static final Logger log = Logger.getLogger(CSVReader.class);
+ private static final Logger log = Logger.getLogger(CSVReader.class);
- protected static final boolean INFO = log.isInfoEnabled();
+// protected static final boolean INFO = log.isInfoEnabled();
/**
* The #of characters to buffer in the reader.
@@ -168,7 +168,7 @@
}
- public Header(String name) {
+ public Header(final String name) {
if (name == null)
throw new IllegalArgumentException();
@@ -191,13 +191,13 @@
*
* @return The parsed value.
*/
- public Object parseValue(String text) {
+ public Object parseValue(final String text) {
for (int i = 0; i < formats.length; i++) {
try {
- Format f = formats[i];
+ final Format f = formats[i];
if (f instanceof DateFormat) {
@@ -229,23 +229,41 @@
/**
* Equal if the headers have the same data.
*/
- public boolean equals(Header o) {
-
- if(this==o) return true;
-
- return name.equals(o.name);
-
+ @Override
+ public boolean equals(final Object o) {
+
+ if (this == o)
+ return true;
+
+ if (!(o instanceof Header)) {
+
+ return false;
+
+ }
+
+ return name.equals(((Header) o).name);
+
}
+// public boolean equals(final Header o) {
+//
+// if(this==o) return true;
+//
+// return name.equals(o.name);
+//
+// }
+
/**
* Based on the header name.
*/
+ @Override
public int hashCode() {
return name.hashCode();
}
+ @Override
public String toString() {
return name;
@@ -293,7 +311,8 @@
*/
protected Header[] headers;
- public CSVReader(InputStream is, String charSet) throws IOException {
+ public CSVReader(final InputStream is, final String charSet)
+ throws IOException {
if (is == null)
throw new IllegalArgumentException();
@@ -306,7 +325,7 @@
}
- public CSVReader(Reader r) throws IOException {
+ public CSVReader(final Reader r) throws IOException {
if (r == null)
throw new IllegalArgumentException();
@@ -340,9 +359,9 @@
}
- public boolean setSkipBlankLines(boolean skipBlankLines) {
+ public boolean setSkipBlankLines(final boolean skipBlankLines) {
- boolean tmp = this.skipBlankLines;
+ final boolean tmp = this.skipBlankLines;
this.skipBlankLines = skipBlankLines;
@@ -356,9 +375,9 @@
}
- public boolean setTrimWhitespace(boolean trimWhitespace) {
+ public boolean setTrimWhitespace(final boolean trimWhitespace) {
- boolean tmp = this.trimWhitespace;
+ final boolean tmp = this.trimWhitespace;
this.trimWhitespace = trimWhitespace;
@@ -384,10 +403,11 @@
}
- public long setTailDelayMillis(long tailDelayMillis) {
-
- if(tailDelayMillis<0) throw new IllegalArgumentException();
-
+ public long setTailDelayMillis(final long tailDelayMillis) {
+
+ if (tailDelayMillis < 0)
+ throw new IllegalArgumentException();
+
long tmp = this.tailDelayMillis;
this.tailDelayMillis = tailDelayMillis;
@@ -396,9 +416,11 @@
}
+ @Override
public boolean hasNext() {
- if(exhausted) return false;
+ if (exhausted)
+ return false;
if (line != null) {
@@ -406,17 +428,19 @@
}
- final Thread currentThread = Thread.currentThread();
+// final Thread currentThread = Thread.currentThread();
try {
while (true) {
- if(currentThread.isInterrupted()) {
+ if (Thread.interrupted()) {
- if(INFO)
+ if (log.isInfoEnabled())
log.info("Interrupted");
+ exhausted = true;
+
return false;
}
@@ -469,6 +493,7 @@
}
+ @Override
public Map<String, Object> next() {
if ...
[truncated message content] |