Archive of messages by month:

| Year | Jan | Feb | Mar | Apr | May | Jun | Jul | Aug | Sep | Oct | Nov | Dec |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 2007 | - | - | - | - | - | - | - | 120 | 36 | 116 | 17 | 44 |
| 2008 | 143 | 192 | 74 | 84 | 105 | 64 | 49 | 120 | 159 | 156 | 51 | 28 |
| 2009 | 17 | 55 | 33 | 57 | 54 | 28 | 6 | 16 | 38 | 30 | 26 | 52 |
| 2010 | 7 | 91 | 65 | 2 | 14 | 25 | 38 | 48 | 80 | 70 | 75 | 77 |
| 2011 | 68 | 53 | 51 | 35 | 65 | 101 | 29 | 230 | 95 | 49 | 110 | 63 |
| 2012 | 41 | 42 | 25 | 46 | 51 | 44 | 45 | 29 | 12 | 9 | 17 | 2 |
| 2013 | 12 | 14 | 7 | 16 | 54 | 27 | 11 | 5 | 85 | 27 | 37 | 32 |
| 2014 | 8 | 29 | 5 | 3 | 22 | 3 | 4 | 3 | - | - | - | - |
From: <lor...@us...> - 2013-06-28 11:54:20

Revision: 4010
          http://sourceforge.net/p/dl-learner/code/4010
Author:   lorenz_b
Date:     2013-06-28 11:54:18 +0000 (Fri, 28 Jun 2013)

Log Message:
-----------
Omit timeout in single-query mode for now until pagination in Claus' API is fixed.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java
    trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java
    trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java	2013-06-28 11:53:26 UTC (rev 4009)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalDataPropertyAxiomLearner.java	2013-06-28 11:54:18 UTC (rev 4010)
@@ -135,7 +135,7 @@
 	}
 
 	private void runSPARQL1_1_Mode() {
-		int numberOfSubjects = reasoner.getSubjectCountForProperty(propertyToDescribe, getRemainingRuntimeInMilliSeconds());
+		int numberOfSubjects = reasoner.getSubjectCountForProperty(propertyToDescribe);//TODO, getRemainingRuntimeInMilliSeconds());
 		if(numberOfSubjects == -1){
 			logger.warn("Early termination: Got timeout while counting number of distinct subjects for given property.");
 			return;

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java	2013-06-28 11:53:26 UTC (rev 4009)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/FunctionalObjectPropertyAxiomLearner.java	2013-06-28 11:54:18 UTC (rev 4010)
@@ -137,7 +137,7 @@
 	private void runSPARQL1_1_Mode() {
 		// get number of instances of s with <s p o>
-		int numberOfSubjects = reasoner.getSubjectCountForProperty(propertyToDescribe, getRemainingRuntimeInMilliSeconds());
+		int numberOfSubjects = reasoner.getSubjectCountForProperty(propertyToDescribe);//TODO, getRemainingRuntimeInMilliSeconds());
 		if(numberOfSubjects == -1){
 			logger.warn("Early termination: Got timeout while counting number of distinct subjects for given property.");
 			return;

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java	2013-06-28 11:53:26 UTC (rev 4009)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseFunctionalObjectPropertyAxiomLearner.java	2013-06-28 11:54:18 UTC (rev 4010)
@@ -136,7 +136,7 @@
 	private void runSPARQL1_1_Mode() {
 		// get number of instances of s with <s p o>
-		int numberOfObjects = reasoner.getObjectCountForProperty(propertyToDescribe, getRemainingRuntimeInMilliSeconds());
+		int numberOfObjects = reasoner.getObjectCountForProperty(propertyToDescribe);//TODO, getRemainingRuntimeInMilliSeconds());
 		if(numberOfObjects == -1){
 			logger.warn("Early termination: Got timeout while counting number of distinct objects for given property.");
 			return;
@@ -164,12 +164,12 @@
 	}
 
 	public static void main(String[] args) throws Exception{
-		SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW());
+		SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());
 		InverseFunctionalObjectPropertyAxiomLearner l = new InverseFunctionalObjectPropertyAxiomLearner(ks);
 		l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/profession"));
 		l.setMaxExecutionTimeInSeconds(10);
 		l.init();
-		l.setForceSPARQL_1_0_Mode(true);
+//		l.setForceSPARQL_1_0_Mode(true);
 		l.start();
 		System.out.println(l.getCurrentlyBestEvaluatedAxioms(1));
 	}
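Editor's note: for readers who just want to run one of these learners, a minimal usage sketch follows. It mirrors the main() method in the diff above; the package names in the imports are inferred from the repository paths and may need adjusting for your checkout.

// Minimal usage sketch based on the main() method shown in the diff above.
// Assumes the DL-Learner trunk (around rev 4010) is on the classpath; import paths are a best guess.
import org.dllearner.algorithms.properties.InverseFunctionalObjectPropertyAxiomLearner;
import org.dllearner.core.owl.ObjectProperty;
import org.dllearner.kb.SparqlEndpointKS;
import org.dllearner.kb.sparql.SparqlEndpoint;

public class AxiomLearnerExample {
    public static void main(String[] args) throws Exception {
        // knowledge source: the public DBpedia SPARQL endpoint
        SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpedia());

        // ask whether dbo:profession behaves like an inverse-functional property
        InverseFunctionalObjectPropertyAxiomLearner l = new InverseFunctionalObjectPropertyAxiomLearner(ks);
        l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/profession"));
        l.setMaxExecutionTimeInSeconds(10);
        l.init();
        l.start();

        // print the best evaluated axiom found within the time budget
        System.out.println(l.getCurrentlyBestEvaluatedAxioms(1));
    }
}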
From: <lor...@us...> - 2013-06-28 11:53:30

Revision: 4009
          http://sourceforge.net/p/dl-learner/code/4009
Author:   lorenz_b
Date:     2013-06-28 11:53:26 +0000 (Fri, 28 Jun 2013)

Log Message:
-----------
Added methods without timeout.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java

Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java	2013-06-28 08:47:48 UTC (rev 4008)
+++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java	2013-06-28 11:53:26 UTC (rev 4009)
@@ -214,7 +214,20 @@
 		return cnt;
 	}
 
+	public int getSubjectCountForProperty(Property p){
+		int cnt = -1;
+		String query = String.format(
+				"SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s <%s> ?o.}",
+				p.getName());
+		ResultSet rs = executeSelectQuery(query);
+		if(rs.hasNext()){
+			cnt = rs.next().getLiteral("cnt").getInt();
+		}
+		return cnt;
+	}
+
 	public int getObjectCountForProperty(ObjectProperty p, long timeout){
 		int cnt = -1;
 		String query = String.format(
@@ -227,7 +240,20 @@
 		return cnt;
 	}
 
+	public int getObjectCountForProperty(ObjectProperty p){
+		int cnt = -1;
+		String query = String.format(
+				"SELECT (COUNT(DISTINCT ?o) AS ?cnt) WHERE {?s <%s> ?o.}",
+				p.getName());
+		ResultSet rs = executeSelectQuery(query);
+		if(rs.hasNext()){
+			cnt = rs.next().getLiteral("cnt").getInt();
+		}
+		return cnt;
+	}
+
 	public int getPopularity(NamedClass nc){
 		if(classPopularityMap != null && classPopularityMap.containsKey(nc)){
 			return classPopularityMap.get(nc);
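Editor's note: the new overloads issue a plain COUNT(DISTINCT ...) query and return -1 when no binding comes back, which callers treat as a timeout or early-termination signal. A self-contained sketch of the same idea using the Jena ARQ API against a public endpoint follows; the endpoint URL and property are placeholders, and the real code routes the query through executeSelectQuery and its configured QueryExecutionFactory instead.

// Standalone sketch of the "count distinct subjects of a property" query added above,
// using plain Jena ARQ against a public endpoint. -1 signals "no result / timed out".
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.ResultSet;

public class SubjectCountExample {

    static int countDistinctSubjects(String endpoint, String propertyUri, long timeoutMs) {
        String query = String.format(
                "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE { ?s <%s> ?o . }", propertyUri);
        QueryExecution qe = QueryExecutionFactory.sparqlService(endpoint, query);
        if (timeoutMs > 0) {
            qe.setTimeout(timeoutMs); // client-side timeout, as in the timeout-aware overload
        }
        try {
            ResultSet rs = qe.execSelect();
            if (rs.hasNext()) {
                return rs.next().getLiteral("cnt").getInt();
            }
            return -1; // no binding: callers treat this as early termination
        } catch (Exception e) {
            return -1; // e.g. a query-cancelled exception when the timeout fires
        } finally {
            qe.close();
        }
    }

    public static void main(String[] args) {
        int cnt = countDistinctSubjects("http://dbpedia.org/sparql",
                "http://dbpedia.org/ontology/author", 10000);
        System.out.println("distinct subjects: " + cnt);
    }
}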
From: <lor...@us...> - 2013-06-28 08:47:52

Revision: 4008
          http://sourceforge.net/p/dl-learner/code/4008
Author:   lorenz_b
Date:     2013-06-28 08:47:48 +0000 (Fri, 28 Jun 2013)

Log Message:
-----------
Catch timeout exception.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java	2013-06-27 18:48:26 UTC (rev 4007)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/InverseObjectPropertyAxiomLearner.java	2013-06-28 08:47:48 UTC (rev 4008)
@@ -42,7 +42,7 @@
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.rdf.model.ModelFactory;
 
-@ComponentAnn(name="inverse objectproperty domain axiom learner", shortName="oplinv", version=0.1)
+@ComponentAnn(name="inverse objectproperty axiom learner", shortName="oplinv", version=0.1)
 public class InverseObjectPropertyAxiomLearner extends AbstractAxiomLearningAlgorithm {
 
 	private static final Logger logger = LoggerFactory.getLogger(InverseObjectPropertyAxiomLearner.class);
@@ -78,7 +78,7 @@
 		}
 
 		if(!forceSPARQL_1_0_Mode && ks.supportsSPARQL_1_1()){
-			runSingleQueryMode();
+			runSPARQL1_1_Mode();
 		} else {
 			runSPARQL1_0_Mode();
 		}
@@ -157,12 +157,12 @@
 	}
 
 	public static void main(String[] args) throws Exception{
-		SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://[2001:638:902:2010:0:168:35:138]/sparql")));
+		SparqlEndpointKS ks = new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW());
 		InverseObjectPropertyAxiomLearner l = new InverseObjectPropertyAxiomLearner(ks);
-		l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/animal"));
-		l.setMaxExecutionTimeInSeconds(10);
-		l.setForceSPARQL_1_0_Mode(true);
+		l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/routeEnd"));
+		l.setMaxExecutionTimeInSeconds(60);
+//		l.setForceSPARQL_1_0_Mode(true);
 //		l.setReturnOnlyNewAxioms(true);
 		l.init();
 		l.start();
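Editor's note: the rename makes the method name match its runSPARQL1_0_Mode counterpart: the single-query path relies on SPARQL 1.1 features such as COUNT aggregates, while the 1.0 fallback has to page over raw bindings and aggregate client-side. A rough sketch of that distinction, query strings only, with the surrounding plumbing left to the learner:

// Rough illustration of the two modes selected above: the SPARQL 1.1 path asks the
// endpoint to aggregate, the 1.0 fallback pages over raw bindings and counts locally.
// Strings only; the real learner wraps these in its own query and paging infrastructure.
public class ModeSketch {

    // SPARQL 1.1: one aggregate query, evaluated entirely on the endpoint
    static String sparql11Count(String propertyUri) {
        return "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE { ?s <" + propertyUri + "> ?o . }";
    }

    // SPARQL 1.0: no aggregates, so fetch bindings page by page and count on the client
    static String sparql10Page(String propertyUri, int limit, int offset) {
        return "SELECT DISTINCT ?s WHERE { ?s <" + propertyUri + "> ?o . } "
                + "LIMIT " + limit + " OFFSET " + offset;
    }

    public static void main(String[] args) {
        String p = "http://dbpedia.org/ontology/routeEnd";
        System.out.println(sparql11Count(p));
        System.out.println(sparql10Page(p, 1000, 0));
    }
}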
From: <lor...@us...> - 2013-06-27 18:48:28

Revision: 4007
          http://sourceforge.net/p/dl-learner/code/4007
Author:   lorenz_b
Date:     2013-06-27 18:48:26 +0000 (Thu, 27 Jun 2013)

Log Message:
-----------
Added method to get classes filtered by namespace remotely.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java

Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java	2013-06-27 18:47:37 UTC (rev 4006)
+++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java	2013-06-27 18:48:26 UTC (rev 4007)
@@ -504,6 +504,18 @@
 		}
 		return types;
 	}
+
+	public Set<NamedClass> getOWLClasses(String namespace) {
+		Set<NamedClass> types = new HashSet<NamedClass>();
+		String query = String.format("SELECT DISTINCT ?class WHERE {?class a <%s>. FILTER(REGEX(?class,'%s'))}", OWL.Class.getURI(), namespace);
+		ResultSet rs = executeSelectQuery(query);
+		QuerySolution qs;
+		while(rs.hasNext()){
+			qs = rs.next();
+			types.add(new NamedClass(qs.getResource("class").getURI()));
+		}
+		return types;
+	}
 
 	/**
 	 * Returns a set of classes which are siblings, i.e. on the same level
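Editor's note: the added getOWLClasses(String namespace) retrieves all owl:Class instances and keeps those whose IRI matches the namespace. REGEX as used here is an unanchored substring match; below is a sketch of the generated query plus a prefix-anchored STRSTARTS variant (SPARQL 1.1), with the namespace as a placeholder. Query strings only, no endpoint access.

// Sketch of the namespace filter added to SPARQLReasoner.getOWLClasses(String).
// REGEX, as used above, matches anywhere in the IRI; STRSTARTS would anchor the match to the prefix.
public class ClassesByNamespace {

    static String regexVariant(String namespace) {
        return "SELECT DISTINCT ?class WHERE { "
                + "?class a <http://www.w3.org/2002/07/owl#Class> . "
                + "FILTER(REGEX(?class, '" + namespace + "')) }";
    }

    static String strstartsVariant(String namespace) {
        return "SELECT DISTINCT ?class WHERE { "
                + "?class a <http://www.w3.org/2002/07/owl#Class> . "
                + "FILTER(STRSTARTS(STR(?class), '" + namespace + "')) }";
    }

    public static void main(String[] args) {
        String ns = "http://dbpedia.org/ontology/";
        System.out.println(regexVariant(ns));
        System.out.println(strstartsVariant(ns));
    }
}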
From: <lor...@us...> - 2013-06-27 18:47:39

Revision: 4006
          http://sourceforge.net/p/dl-learner/code/4006
Author:   lorenz_b
Date:     2013-06-27 18:47:37 +0000 (Thu, 27 Jun 2013)

Log Message:
-----------
Updated enrichment script.

Modified Paths:
--------------
    trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java

Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java
===================================================================
--- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java	2013-06-27 18:31:00 UTC (rev 4005)
+++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java	2013-06-27 18:47:37 UTC (rev 4006)
@@ -341,7 +341,7 @@
 
 			// loop over all entities and call appropriate algorithms
-			Set<NamedClass> classes = reasoner.getTypes();//st.getAllClasses();
+			Set<NamedClass> classes = allowedNamespaces.isEmpty() ? reasoner.getOWLClasses() : reasoner.getOWLClasses(allowedNamespaces.iterator().next());//st.getAllClasses();
 			filterByNamespaces(classes);//classes = Sets.newHashSet(new NamedClass("http://dbpedia.org/ontology/Arachnid"));
 			int entities = 0;
 			for(NamedClass nc : classes) {
From: <lor...@us...> - 2013-06-27 18:31:02
|
Revision: 4005 http://sourceforge.net/p/dl-learner/code/4005 Author: lorenz_b Date: 2013-06-27 18:31:00 +0000 (Thu, 27 Jun 2013) Log Message: ----------- Updated enrichment script. Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-21 13:36:20 UTC (rev 4004) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-27 18:31:00 UTC (rev 4005) @@ -52,13 +52,13 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; +import java.util.SortedSet; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.Set; -import java.util.SortedSet; import joptsimple.OptionException; import joptsimple.OptionParser; @@ -66,8 +66,6 @@ import joptsimple.OptionSpec; import org.aksw.commons.jena_owlapi.Conversion; -import org.aksw.jena_sparql_api.core.QueryExecutionFactory; -import org.apache.jena.riot.Lang; import org.apache.jena.riot.checker.CheckerLiterals; import org.apache.jena.riot.system.ErrorHandlerFactory; import org.apache.log4j.ConsoleAppender; @@ -75,7 +73,7 @@ import org.apache.log4j.Logger; import org.apache.log4j.SimpleLayout; import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxOntologyFormat; -import org.coode.owlapi.turtle.TurtleOntologyFormat; +import org.dllearner.algorithms.DisjointClassesLearner; import org.dllearner.algorithms.celoe.CELOE; import org.dllearner.algorithms.properties.AsymmetricObjectPropertyAxiomLearner; import org.dllearner.algorithms.properties.DataPropertyDomainAxiomLearner; @@ -158,9 +156,7 @@ import com.clarkparsia.owlapiv3.XSD; import com.google.common.collect.Sets; -import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.ResultSet; -import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; @@ -306,7 +302,7 @@ dataPropertyAlgorithms.add(SubDataPropertyOfAxiomLearner.class); classAlgorithms = new LinkedList<Class<? 
extends LearningAlgorithm>>(); -// classAlgorithms.add(DisjointClassesLearner.class); + classAlgorithms.add(DisjointClassesLearner.class); // classAlgorithms.add(SimpleSubclassLearner.class); classAlgorithms.add(CELOE.class); @@ -359,26 +355,26 @@ break; } } -// entities = 0; -// Set<ObjectProperty> objectProperties = st.getAllObjectProperties(); -// filterByNamespaces(objectProperties); -// for(ObjectProperty property : objectProperties) { -// runObjectPropertyAlgorithms(ks, property); -// entities++; -// if(maxEntitiesPerType != -1 && entities > maxEntitiesPerType) { -// break; -// } -// } -// entities = 0; -// Set<DatatypeProperty> dataProperties = st.getAllDataProperties(); -// filterByNamespaces(dataProperties); -// for(DatatypeProperty property : dataProperties) { -// runDataPropertyAlgorithms(ks, property); -// entities++; -// if(maxEntitiesPerType != -1 && entities > maxEntitiesPerType) { -// break; -// } -// } + entities = 0; + Set<ObjectProperty> objectProperties = st.getAllObjectProperties(); + filterByNamespaces(objectProperties); + for(ObjectProperty property : objectProperties) { + runObjectPropertyAlgorithms(ks, property); + entities++; + if(maxEntitiesPerType != -1 && entities > maxEntitiesPerType) { + break; + } + } + entities = 0; + Set<DatatypeProperty> dataProperties = st.getAllDataProperties(); + filterByNamespaces(dataProperties); + for(DatatypeProperty property : dataProperties) { + runDataPropertyAlgorithms(ks, property); + entities++; + if(maxEntitiesPerType != -1 && entities > maxEntitiesPerType) { + break; + } + } } else { if(resource instanceof ObjectProperty) { System.out.println(resource + " appears to be an object property. Running appropriate algorithms.\n"); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
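Editor's note: besides the import cleanup and enabling DisjointClassesLearner, this commit re-enables the object-property and data-property loops, which follow the same pattern as the class loop. A condensed sketch of that per-type loop is below; note that, as written, the strict > in the guard lets one entity beyond maxEntitiesPerType be processed before the loop stops. Names follow the diff, and the run* call is a stand-in for the real algorithm invocation in the script.

// Condensed sketch of the per-entity-type loop re-enabled in Enrichment.java.
// maxEntitiesPerType == -1 means "no limit".
import java.util.Arrays;
import java.util.List;

public class EntityLoopSketch {

    static final int maxEntitiesPerType = 2; // -1 disables the cap

    // stand-in for runObjectPropertyAlgorithms(ks, property) in the script
    static void runObjectPropertyAlgorithms(String property) {
        System.out.println("enriching " + property);
    }

    public static void main(String[] args) {
        List<String> objectProperties = Arrays.asList(
                "http://dbpedia.org/ontology/author",
                "http://dbpedia.org/ontology/starring",
                "http://dbpedia.org/ontology/routeEnd",
                "http://dbpedia.org/ontology/team");

        int entities = 0;
        for (String property : objectProperties) {
            runObjectPropertyAlgorithms(property);
            entities++;
            if (maxEntitiesPerType != -1 && entities > maxEntitiesPerType) {
                break; // strict >: the cap is only checked after one extra entity has run
            }
        }
    }
}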
From: <lor...@us...> - 2013-06-21 13:36:23
|
Revision: 4004 http://sourceforge.net/p/dl-learner/code/4004 Author: lorenz_b Date: 2013-06-21 13:36:20 +0000 (Fri, 21 Jun 2013) Log Message: ----------- Added namespace filter. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-18 15:14:54 UTC (rev 4003) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-21 13:36:20 UTC (rev 4004) @@ -94,7 +94,6 @@ @ConfigOption(name = "useCache", description = "Whether to use a DB cache", defaultValue = "true", required = false, propertyEditorClass = BooleanEditor.class) private boolean useCache = true; - private ExtractionDBCache cache; private QueryExecutionFactory qef; private SparqlEndpointKS ks; @@ -111,18 +110,11 @@ public SPARQLReasoner(SparqlEndpointKS ks) { - this.ks = ks; - - if(useCache){ - cache = new ExtractionDBCache("cache"); - } - classPopularityMap = new HashMap<NamedClass, Integer>(); - objectPropertyPopularityMap = new HashMap<ObjectProperty, Integer>(); + this(ks, (String)null); } - - public SPARQLReasoner(SparqlEndpointKS ks, ExtractionDBCache cache) { + + public SPARQLReasoner(SparqlEndpointKS ks, String cacheDirectory) { this.ks = ks; - this.cache = cache; classPopularityMap = new HashMap<NamedClass, Integer>(); objectPropertyPopularityMap = new HashMap<ObjectProperty, Integer>(); @@ -130,10 +122,10 @@ if(ks.isRemote()){ SparqlEndpoint endpoint = ks.getEndpoint(); qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); - if(cache != null){ + if(cacheDirectory != null){ try { long timeToLive = TimeUnit.DAYS.toMillis(30); - CacheCoreEx cacheBackend = CacheCoreH2.create(cache.getCacheDirectory(), timeToLive, true); + CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDirectory, timeToLive, true); CacheEx cacheFrontend = new CacheExImpl(cacheBackend); qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); } catch (ClassNotFoundException e) { @@ -149,6 +141,10 @@ } } + public SPARQLReasoner(SparqlEndpointKS ks, ExtractionDBCache cache) { + this(ks, cache.getCacheDirectory()); + } + public SPARQLReasoner(OntModel model) { this.model = model; @@ -1410,7 +1406,7 @@ } public void setCache(ExtractionDBCache cache) { - this.cache = cache; +// this.cache = cache; } public void setUseCache(boolean useCache) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
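Editor's note: the SPARQLReasoner constructor now builds a jena-sparql-api QueryExecutionFactory and optionally wraps it in an H2-backed cache, replacing the old ExtractionDBCache field. An outline of that wiring is below; the class names and calls are taken from the diff, but imports are omitted because the jena-sparql-api/aksw-commons package layout varies between versions, so treat it as a sketch rather than a drop-in method.

// Outline of the cache wiring introduced in this revision. Class names and calls are taken
// from the diff; imports omitted (package locations depend on the jena-sparql-api version).
public QueryExecutionFactory buildQueryExecutionFactory(SparqlEndpoint endpoint, String cacheDirectory) {
    // plain HTTP factory against the remote endpoint
    QueryExecutionFactory qef =
            new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs());

    // optionally wrap it with an H2-backed cache (30-day TTL, as in the diff)
    if (cacheDirectory != null) {
        try {
            long timeToLive = TimeUnit.DAYS.toMillis(30);
            CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDirectory, timeToLive, true);
            CacheEx cacheFrontend = new CacheExImpl(cacheBackend);
            qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend);
        } catch (Exception e) {
            // fall back to the uncached factory if the cache cannot be created
            e.printStackTrace();
        }
    }
    return qef;
}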
From: <dc...@us...> - 2013-06-18 15:15:01
|
Revision: 4003 http://sourceforge.net/p/dl-learner/code/4003 Author: dcherix Date: 2013-06-18 15:14:54 +0000 (Tue, 18 Jun 2013) Log Message: ----------- Added examples for the sparqr webapp Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java trunk/examples/sparql/AristotlePosNeg.conf trunk/interfaces/pom.xml trunk/interfaces/src/main/java/org/dllearner/server/Rest.java Added Paths: ----------- trunk/examples/sparqr/ trunk/examples/sparqr/AristotlePosNeg.conf trunk/examples/sparqr/StGeorge.conf Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java 2013-06-18 15:14:54 UTC (rev 4003) @@ -26,7 +26,7 @@ if (aboxfilter != null) { builder.append(aboxfilter); } - builder.append("FILTER (! (?p=rdf:type))"); + builder.append("FILTER ( (?p!=rdf:type))"); builder.append("}"); monABoxQueryGeneration.stop(); return builder.toString(); Modified: trunk/examples/sparql/AristotlePosNeg.conf =================================================================== --- trunk/examples/sparql/AristotlePosNeg.conf 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/examples/sparql/AristotlePosNeg.conf 2013-06-18 15:14:54 UTC (rev 4003) @@ -11,7 +11,7 @@ // SPARQL options sparql.type = "SPARQL endpoint fragment" -sparql.url = "http://dbpedia.openlinksw.com:8890/sparql" +sparql.url = "http://dbpedia.org/sparql" sparql.defaultGraphURIs = {"http://dbpedia.org"} sparql.recursionDepth = 1 //TODOREFACTOR check if predefinedFilter works at all @@ -29,8 +29,8 @@ reasoner.type = "fast instance checker" reasoner.sources = {sparql} +reasoner.forAllSemantics="SomeOnly" - // we want to learn from positive and negative examples lp.type = "posNegStandard" lp.positiveExamples = { Added: trunk/examples/sparqr/AristotlePosNeg.conf =================================================================== --- trunk/examples/sparqr/AristotlePosNeg.conf (rev 0) +++ trunk/examples/sparqr/AristotlePosNeg.conf 2013-06-18 15:14:54 UTC (rev 4003) @@ -0,0 +1,107 @@ +/** + * Some people from Greece. + * Note: DBpedia is always subject to change, solutions will change over time + + * Possible Solution: + * Theorist OR (Mathematician AND Physicist) + */ + + +/****************** + * SPARQL options * + ******************/ +// sparql component to use +sparql.type = "sparqls" +// endpoint +sparql.endpointURL = "http://dbpedia.org/sparql" +// default graph uri for the SPARQL queries +sparql.defaultGraphURI = "http://dbpedia.org" +// recursion depth +sparql.recursionDepth = 2 +// url(s) of the schema definition of the used ontologies. +sparql.ontologySchemaUrls = {"http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl" } + +//SPARQL Filters for the instances. Those filters excludes all literals, and all Catergory instances. 
Thereto a instance must begin with http://dbpedia.org/Resource +sparql.aboxfilter = "FILTER ( +!isLiteral(?o) && +!regex(str(?p), 'http://dbpedia.org/property/website') && +!regex(str(?p), 'http://dbpedia.org/property/wikipage') && +!regex(str(?p), 'http://dbpedia.org/property/wikiPageUsesTemplate') && +!regex(str(?p), 'http://dbpedia.org/property/reference') && +!regex(str(?p), 'http://www.w3.org/2004/02/skos/core') && +!regex(str(?p), 'http://www.w3.org/2002/07/owl#sameAs') && +!regex(str(?p), 'http://xmlns.com/foaf/0.1/') && +!regex(str(?p), 'http://dbpedia.org/property/wordnet_type') && +!regex(str(?p), 'http://dbpedia.org/property/wikilink') && +regex(str(?o), '^http://dbpedia.org/resource/') && +!regex(str(?o),'^http://dbpedia.org/resource/Category:') +) " +// SPARQL Filters for the classes. With this filter only class in the dbpedia ontology are accepted and at example not from YAGO +sparql.tboxfilter = "FILTER ( !regex(str(?class), '^http://upload.wikimedia.org/wikipedia') && +!regex(str(?class), '^http://dbpedia.org/resource/Template') && +!regex(str(?class), '^http://dbpedia.org/resource/Category:') && +!regex(str(?class), '^http://umbel.org/umbel/') + ) . " + +//A list of the start instances +sparql.instances = { +"http://dbpedia.org/resource/Democritus", +"http://dbpedia.org/resource/Zeno_of_Elea", +"http://dbpedia.org/resource/Plato", +"http://dbpedia.org/resource/Socrates", +"http://dbpedia.org/resource/Archytas", +"http://dbpedia.org/resource/Pythagoras", +"http://dbpedia.org/resource/Archimedes", +"http://dbpedia.org/resource/EuClid", +"http://dbpedia.org/resource/Heraclitus" +} + +/******************** + * Reasoner options * + ********************/ +reasoner.type = "fast instance checker" +reasoner.sources = {sparql} +reasoner.forAllSemantics="Standard" + +/***************************** + * Learning problems options * + *****************************/ +// we want to learn from positive and negative examples +lp.type = "posNegStandard" + +//the positives examples +lp.positiveExamples = { +"http://dbpedia.org/resource/Archytas", +"http://dbpedia.org/resource/Pythagoras", +"http://dbpedia.org/resource/Archimedes", +"http://dbpedia.org/resource/Thales" +} + +//the negatives examples +lp.negativeExamples = { +"http://dbpedia.org/resource/Democritus", +"http://dbpedia.org/resource/Zeno_of_Elea", +"http://dbpedia.org/resource/Plato", +"http://dbpedia.org/resource/Socrates", +"http://dbpedia.org/resource/EuClid", +"http://dbpedia.org/resource/Heraclitus" +} +lp.reasoner = reasoner + +/******************************* + * refinement operator options * + *******************************/ + +// create a refinement operator and configure it +op.type = "rho" +op.useHasValueConstructor = true +op.reasoner = reasoner + +/*************************** + * learn algorithm options * + ***************************/ +// we use the OCEL algorithm +alg.type = "ocel" +alg.reasoner = reasoner + + Added: trunk/examples/sparqr/StGeorge.conf =================================================================== --- trunk/examples/sparqr/StGeorge.conf (rev 0) +++ trunk/examples/sparqr/StGeorge.conf 2013-06-18 15:14:54 UTC (rev 4003) @@ -0,0 +1,151 @@ + + +/****************** + * SPARQL options * + ******************/ +// sparql component to use +sparql.type = "sparqls" +// endpoint +sparql.endpointURL = "http://dbpedia.org/sparql" +// default graph uri for the SPARQL queries +sparql.defaultGraphURI = "http://dbpedia.org" +// recursion depth +sparql.recursionDepth = 2 +// url(s) of the schema definition of the 
used ontologies. +sparql.ontologySchemaUrls = {"http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl" } + +sparql.aboxfilter = "FILTER ( +!isLiteral(?o) && +!regex(str(?p), 'http://dbpedia.org/property/website') && +!regex(str(?p), 'http://dbpedia.org/property/wikipage') && +!regex(str(?p), 'http://dbpedia.org/property/wikiPageUsesTemplate') && +!regex(str(?p), 'http://dbpedia.org/property/reference') && +!regex(str(?p), 'http://www.w3.org/2004/02/skos/core') && +!regex(str(?p), 'http://www.w3.org/2002/07/owl#sameAs') && +!regex(str(?p), 'http://xmlns.com/foaf/0.1/') && +!regex(str(?p), 'http://dbpedia.org/property/wordnet_type') && +!regex(str(?p), 'http://dbpedia.org/property/wikilink') && +regex(str(?o), '^http://dbpedia.org/resource/') +) " + +sparql.tboxfilter = "FILTER ( !regex(str(?class), '^http://upload.wikimedia.org/wikipedia') && +!regex(str(?class), '^http://dbpedia.org/resource/Template') && +!regex(str(?class), '^http://dbpedia.org/resource/Category:') && +!regex(str(?class), '^http://umbel.org/umbel/') && +!regex(str(?class), '^http://dbpedia.org/class/yago') + ) . " + +//A list of the start instances +sparql.instances = {"http://dbpedia.org/resource/Gorden_Tallis", +"http://dbpedia.org/resource/Matthew_Elliott_(rugby_league)", +"http://dbpedia.org/resource/Graeme_Bradley", +"http://dbpedia.org/resource/Elton_Rasmussen", +"http://dbpedia.org/resource/Noel_Goldthorpe", +"http://dbpedia.org/resource/Shane_Kenward", +"http://dbpedia.org/resource/Anthony_Mundine", +"http://dbpedia.org/resource/Steve_Edge_(rugby_league)", +"http://dbpedia.org/resource/Robbie_Simpson_(rugby_league)", +"http://dbpedia.org/resource/Ivan_Henjak", +"http://dbpedia.org/resource/Dick_Huddart", +"http://dbpedia.org/resource/Mark_Coyne_(rugby_league)", +"http://dbpedia.org/resource/Henry_Tatana", +"http://dbpedia.org/resource/Col_Maxwell", +"http://dbpedia.org/resource/Wayne_Bartrim", +"http://dbpedia.org/resource/Mitch_Brennan", +"http://dbpedia.org/resource/Steve_Morris", +"http://dbpedia.org/resource/Brian_Johnston_(rugby_league)", +"http://dbpedia.org/resource/Herb_Narvo", +"http://dbpedia.org/resource/Chris_Johns_(rugby_league)", +"http://dbpedia.org/resource/Martin_Offiah", +"http://dbpedia.org/resource/Gorden_Tallis", +"http://dbpedia.org/resource/Lance_Thompson", +"http://dbpedia.org/resource/Rod_Reddy", +"http://dbpedia.org/resource/Wally_Fullerton_Smith", +"http://dbpedia.org/resource/Neil_Tierney", +"http://dbpedia.org/resource/Daniel_Wagon", +"http://dbpedia.org/resource/Ian_Herron", +"http://dbpedia.org/resource/Eric_Grothe%2C_Jr.", +"http://dbpedia.org/resource/Michael_Ennis", +"http://dbpedia.org/resource/Michael_Buettner", +"http://dbpedia.org/resource/Chris_Mortimer", +"http://dbpedia.org/resource/Feleti_Mateo", +"http://dbpedia.org/resource/John_Barclay_(cricketer)", +"http://dbpedia.org/resource/Lewis_Collins_(aviator)", +"http://dbpedia.org/resource/Johann_Mohr", +"http://dbpedia.org/resource/John_Williams_(VC)", +"http://dbpedia.org/resource/Paula_Pequeno", +"http://dbpedia.org/resource/Ben_Ross", +"http://dbpedia.org/resource/Colin_Forsyth", +"http://dbpedia.org/resource/Brian_Davies_(rugby_league)", +"http://dbpedia.org/resource/Harry_Taylor_(rugby_league)", +"http://dbpedia.org/resource/Wayne_Proctor_(rugby_league)" + } + +reasoner.type = "fast instance checker" +reasoner.sources = {sparql} + +lp.type = "posNegStandard" + +lp.positiveExamples = { +"http://dbpedia.org/resource/Matthew_Elliott_(rugby_league)", +"http://dbpedia.org/resource/Graeme_Bradley", 
+"http://dbpedia.org/resource/Elton_Rasmussen", +"http://dbpedia.org/resource/Noel_Goldthorpe", +"http://dbpedia.org/resource/Shane_Kenward", +"http://dbpedia.org/resource/Anthony_Mundine", +"http://dbpedia.org/resource/Steve_Edge_(rugby_league)", +"http://dbpedia.org/resource/Robbie_Simpson_(rugby_league)", +"http://dbpedia.org/resource/Ivan_Henjak", +"http://dbpedia.org/resource/Dick_Huddart", +"http://dbpedia.org/resource/Mark_Coyne_(rugby_league)", +"http://dbpedia.org/resource/Henry_Tatana", +"http://dbpedia.org/resource/Col_Maxwell", +"http://dbpedia.org/resource/Wayne_Bartrim", +"http://dbpedia.org/resource/Mitch_Brennan", +"http://dbpedia.org/resource/Steve_Morris", +"http://dbpedia.org/resource/Brian_Johnston_(rugby_league)", +"http://dbpedia.org/resource/Herb_Narvo", +"http://dbpedia.org/resource/Chris_Johns_(rugby_league)", +"http://dbpedia.org/resource/Martin_Offiah", +"http://dbpedia.org/resource/Gorden_Tallis", +"http://dbpedia.org/resource/Lance_Thompson", +"http://dbpedia.org/resource/Rod_Reddy", +"http://dbpedia.org/resource/Wally_Fullerton_Smith", +"http://dbpedia.org/resource/Neil_Tierney" +} + +lp.negativeExamples = { +"http://dbpedia.org/resource/Daniel_Wagon", +"http://dbpedia.org/resource/Ian_Herron", +"http://dbpedia.org/resource/Eric_Grothe%2C_Jr.", +"http://dbpedia.org/resource/Michael_Ennis", +"http://dbpedia.org/resource/Michael_Buettner", +"http://dbpedia.org/resource/Chris_Mortimer", +"http://dbpedia.org/resource/Feleti_Mateo", +"http://dbpedia.org/resource/John_Barclay_(cricketer)", +"http://dbpedia.org/resource/Lewis_Collins_(aviator)", +"http://dbpedia.org/resource/Johann_Mohr", +"http://dbpedia.org/resource/John_Williams_(VC)", +"http://dbpedia.org/resource/Paula_Pequeno", +"http://dbpedia.org/resource/Ben_Ross", +"http://dbpedia.org/resource/Colin_Forsyth", +"http://dbpedia.org/resource/Brian_Davies_(rugby_league)", +"http://dbpedia.org/resource/Harry_Taylor_(rugby_league)", +"http://dbpedia.org/resource/Wayne_Proctor_(rugby_league)" + } + +lp.reasoner = reasoner + + +op.type = "rho" +op.useNegation = false +op.useAllConstructor = false +op.useCardinalityRestrictions = false +op.useHasValueConstructor = true +op.reasoner = reasoner + + +alg.type = "ocel" +alg.reasoner = reasoner +alg.maxExecutionTimeInSeconds = 30 +alg.noisePercentage = 10.0 Property changes on: trunk/examples/sparqr/StGeorge.conf ___________________________________________________________________ Added: svn:executable ## -0,0 +1 ## +* \ No newline at end of property Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/interfaces/pom.xml 2013-06-18 15:14:54 UTC (rev 4003) @@ -356,7 +356,7 @@ <build> <finalName>dl-learner</finalName> <plugins> - <plugin> + <!-- <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-shade-plugin</artifactId> <version>1.6</version> @@ -376,7 +376,7 @@ </configuration> </execution> </executions> - </plugin> + </plugin>--> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>xmlbeans-maven-plugin</artifactId> @@ -521,8 +521,8 @@ <!-- Exclude Project-D from Project-B --> <artifactId>slf4j-log4j12</artifactId> </exclusion> - <exclusion> - <groupId>net.sourceforge</groupId> + <exclusion> + <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi</artifactId> </exclusion> </exclusions> Modified: trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 
=================================================================== --- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2013-06-17 10:35:31 UTC (rev 4002) +++ trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2013-06-18 15:14:54 UTC (rev 4003) @@ -94,7 +94,7 @@ learningResult.put("manchester", ed.getDescription().toManchesterSyntaxString(null, null)); learningResult.put("kbsyntax", ed.getDescription().toKBSyntaxString()); // learningResult.put("sparql", sqd.getSparqlQuery(ed.getDescription())); - learningResult.put("sparql", sparqlConv.asQuery("?subject", OWLAPIConverter.getOWLAPIDescription(ed.getDescription()))); + learningResult.put("sparql", " "+ sparqlConv.asQuery("?subject", OWLAPIConverter.getOWLAPIDescription(ed.getDescription()))+" "); learningResult.put("accuracy", ed.getAccuracy()); learningResult.put("truePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getCoveredPositives())); learningResult.put("falsePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getNotCoveredPositives())); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
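Editor's note: the one-line ABoxQueryGenerator fix rewrites the predicate filter from !(?p = rdf:type) to ?p != rdf:type; both exclude rdf:type triples from the extracted ABox fragment, the new form just states the condition directly. A hypothetical reduction of the query assembly around that line follows; only the final FILTER append is taken from the diff, while the CONSTRUCT skeleton, prefix handling, and variable names are illustrative.

// Hypothetical reduction of the ABox query assembly around the fixed filter line.
public class ABoxQuerySketch {

    static String buildABoxQuery(String instanceUri, String aboxFilter) {
        StringBuilder builder = new StringBuilder();
        builder.append("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ");
        builder.append("CONSTRUCT { <").append(instanceUri).append("> ?p ?o } WHERE { ");
        builder.append("<").append(instanceUri).append("> ?p ?o . ");
        if (aboxFilter != null) {
            builder.append(aboxFilter); // e.g. the regex-based aboxfilter from the conf files above
        }
        builder.append("FILTER ( (?p != rdf:type))"); // the fixed line: keep everything except rdf:type
        builder.append("}");
        return builder.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildABoxQuery("http://dbpedia.org/resource/Aristotle", null));
    }
}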
From: <lor...@us...> - 2013-06-17 10:35:34
|
Revision: 4002 http://sourceforge.net/p/dl-learner/code/4002 Author: lorenz_b Date: 2013-06-17 10:35:31 +0000 (Mon, 17 Jun 2013) Log Message: ----------- Updated eval script. Modified Paths: -------------- trunk/scripts/pom.xml trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/scripts/pom.xml =================================================================== --- trunk/scripts/pom.xml 2013-06-16 10:40:01 UTC (rev 4001) +++ trunk/scripts/pom.xml 2013-06-17 10:35:31 UTC (rev 4002) @@ -146,7 +146,7 @@ <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>exec-maven-plugin</artifactId> - <version>1.2</version> + <version>1.2.1</version> <executions> <execution> @@ -159,7 +159,7 @@ <executable>java</executable> <arguments> <argument>-Xms512m</argument> - <argument>-Xmx3000m</argument> + <argument>-Xmx10000m</argument> </arguments> </configuration> </plugin> Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-16 10:40:01 UTC (rev 4001) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-17 10:35:31 UTC (rev 4002) @@ -74,6 +74,7 @@ import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLLogicalAxiom; import org.semanticweb.owlapi.model.OWLObjectComplementOf; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; import org.semanticweb.owlapi.model.OWLOntology; @@ -373,7 +374,7 @@ e.printStackTrace(); } } - System.exit(0); +// System.exit(0); Monitor patternTimeMon = MonitorFactory.getTimeMonitor("pattern-runtime"); //for each pattern @@ -438,10 +439,46 @@ } } +// private List<OWLAxiom> createSample(OWLOntology ontology, Collection<NamedClass> classes){ +// List<OWLAxiom> sample = new ArrayList<OWLAxiom>(); +// +// Set<OWLAxiom> axioms = ontology.getAxioms(); +// //filter out trivial axioms, e.g. 
A SubClassOf Thing or A EquivalentTo A and B +// filterOutTrivialAxioms(axioms); +// //filter out axioms below threshold +// filterOutAxiomsBelowThreshold(axioms, sampleThreshold); +// //get for each class some random axioms +// for (NamedClass cls : classes) { +// List<OWLAxiom> relatedAxioms = new ArrayList<OWLAxiom>(ontology.getReferencingAxioms(df.getOWLClass(IRI +// .create(cls.getName())))); +// Multimap<Double, OWLAxiom> accuracyWithAxioms = TreeMultimap.create(); +// for (OWLAxiom axiom : relatedAxioms) { +// double accuracy = getAccuracy(axiom); +// if(accuracy >= sampleThreshold){ +// accuracyWithAxioms.put(accuracy, axiom); +// } +// } +// //pick the set of axioms with highest score +// NavigableSet<Double> keySet = (NavigableSet<Double>)accuracyWithAxioms.keySet(); +// if(!keySet.isEmpty()){ +// Double score = keySet.first(); +// Collection<OWLAxiom> axiomsWithHighestScore = accuracyWithAxioms.get(score); +// List<OWLAxiom> axiomList = new ArrayList<OWLAxiom>(axiomsWithHighestScore); +// Collections.shuffle(axiomList, new Random(123)); +// if(!axiomList.isEmpty()){ +// sample.add(axiomList.get(0)); +// } +// } +// } +// +// Collections.shuffle(sample, new Random(123)); +// return sample.subList(0, Math.min(sampleSize, sample.size())); +// } + private List<OWLAxiom> createSample(OWLOntology ontology, Collection<NamedClass> classes){ List<OWLAxiom> sample = new ArrayList<OWLAxiom>(); - Set<OWLAxiom> axioms = ontology.getAxioms(); + Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms(); //filter out trivial axioms, e.g. A SubClassOf Thing or A EquivalentTo A and B filterOutTrivialAxioms(axioms); //filter out axioms below threshold @@ -526,8 +563,8 @@ return axiomList.subList(0, Math.min(sampleSize, axiomList.size())); } - private void filterOutAxiomsBelowThreshold(Set<OWLAxiom> axioms, double threshold) { - for (Iterator<OWLAxiom> iter = axioms.iterator(); iter.hasNext();) { + private void filterOutAxiomsBelowThreshold(Set<? extends OWLAxiom> axioms, double threshold) { + for (Iterator<? extends OWLAxiom> iter = axioms.iterator(); iter.hasNext();) { OWLAxiom axiom = iter.next(); if(getAccuracy(axiom) < threshold){ iter.remove(); @@ -535,8 +572,8 @@ } } - private void filterOutTrivialAxioms(Set<OWLAxiom> axioms) { - for (Iterator<OWLAxiom> iter = axioms.iterator(); iter.hasNext();) { + private void filterOutTrivialAxioms(Set<? extends OWLAxiom> axioms) { + for (Iterator<? extends OWLAxiom> iter = axioms.iterator(); iter.hasNext();) { OWLAxiom axiom = iter.next(); if (axiom.isOfType(AxiomType.EQUIVALENT_CLASSES)) { if(((OWLEquivalentClassesAxiom) axiom).getClassExpressions().size() == 1){ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
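Editor's note: apart from switching to ontology.getLogicalAxioms(), the filter helpers above are widened from Set<OWLAxiom> to Set<? extends OWLAxiom>. That wildcard is what allows a Set<OWLLogicalAxiom> to be passed in, because Java generics are invariant. A minimal self-contained illustration of the point, with toy types in place of the OWL API interfaces:

// Why the wildcard is needed: Java generics are invariant, so a Set<LogicalAxiom>
// is NOT a Set<Axiom>. Toy types stand in for the OWL API interfaces.
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

public class WildcardExample {

    interface Axiom { boolean trivial(); }
    static class LogicalAxiom implements Axiom {
        public boolean trivial() { return false; }
    }

    // A parameter typed Set<Axiom> would reject a Set<LogicalAxiom>; the wildcard accepts both.
    static void filterOutTrivialAxioms(Set<? extends Axiom> axioms) {
        for (Iterator<? extends Axiom> iter = axioms.iterator(); iter.hasNext();) {
            if (iter.next().trivial()) {
                iter.remove(); // removal is safe: nothing of an unknown type is added
            }
        }
    }

    public static void main(String[] args) {
        Set<LogicalAxiom> logicalAxioms = new HashSet<LogicalAxiom>();
        logicalAxioms.add(new LogicalAxiom());
        filterOutTrivialAxioms(logicalAxioms); // compiles thanks to the bounded wildcard
        System.out.println(logicalAxioms.size());
    }
}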
From: <lor...@us...> - 2013-06-16 10:40:04
|
Revision: 4001 http://sourceforge.net/p/dl-learner/code/4001 Author: lorenz_b Date: 2013-06-16 10:40:01 +0000 (Sun, 16 Jun 2013) Log Message: ----------- Updated eval script. Modified Paths: -------------- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-14 15:02:24 UTC (rev 4000) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-16 10:40:01 UTC (rev 4001) @@ -33,6 +33,7 @@ import java.util.Random; import java.util.Set; import java.util.TreeSet; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.prefs.Preferences; @@ -90,6 +91,10 @@ import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; import com.google.common.base.Charsets; import com.google.common.base.Joiner; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; import com.google.common.collect.HashMultiset; import com.google.common.collect.Multimap; import com.google.common.collect.Multiset; @@ -153,7 +158,13 @@ private Connection conn; private PreparedStatement ps; - + + private LoadingCache<NamedClass, Model> fragments; + + private File samplesDir; + private File instantiationsDir; + + public OWLAxiomPatternUsageEvaluation() { try { BZip2CompressorInputStream is = new BZip2CompressorInputStream(new URL(ontologyURL).openStream()); @@ -169,6 +180,11 @@ } initDBConnection(); + + samplesDir = new File("pattern-instantiations-samples"); + samplesDir.mkdir(); + instantiationsDir = new File("pattern-instantiations"); + instantiationsDir.mkdir(); } private void initDBConnection() { @@ -218,13 +234,21 @@ List<OWLAxiom> patterns = getPatternsToEvaluate(patternOntology); //get all classes in KB - Collection<NamedClass> classes = reasoner.getTypes(ns); + Collection<NamedClass> classes = reasoner.getOWLClasses(); - //randomize and extract a chunk + //get n random classes which contain at least x instances + int minNrOfInstances = 5; List<NamedClass> classesList = new ArrayList<NamedClass>(classes); Collections.shuffle(classesList, new Random(123)); - classesList = classesList.subList(0, maxNrOfTestedClasses); - classes = classesList; + classes = new TreeSet<NamedClass>(); + for (NamedClass cls : classesList) { + if(reasoner.getIndividualsCount(cls) >= minNrOfInstances){ + classes.add(cls); + } + if(classes.size() == maxNrOfTestedClasses){ + break; + } + } classes = Collections.singleton(new NamedClass("http://dbpedia.org/ontology/BaseballPlayer")); //get the maximum modal depth in the pattern axioms @@ -264,8 +288,7 @@ logger.info("...on class " + cls + "..."); OWLClass owlClass = df.getOWLClass(IRI.create(cls.getName())); Model fragment = class2Fragment.get(cls); - Map<OWLAxiom, Score> result = applyPattern(pattern, owlClass, fragment); - Set<OWLAxiom> annotatedAxioms = asAnnotatedAxioms(result); + Set<OWLAxiom> annotatedAxioms = applyPattern(pattern, owlClass, fragment); filterOutTrivialAxioms(annotatedAxioms); filterOutAxiomsBelowThreshold(annotatedAxioms, threshold); int nrOfAxiomsLocal = annotatedAxioms.size(); @@ -277,7 +300,7 @@ patternClassTimeMon.stop(); write2DB(pattern, owlClass, 
patternClassTimeMon.getLastValue(), nrOfAxiomsLocal, nrOfAxiomsGlobal); } - ontology = save(pattern, learnedAxioms, file); + ontology = save(pattern, learnedAxioms); } else { OWLOntologyManager man = OWLManager.createOWLOntologyManager(); try { @@ -288,7 +311,7 @@ } patternTimeMon.stop(); if(sampling){ - List<OWLAxiom> sample = createSample(ontology);//, classes); + List<OWLAxiom> sample = createSample(ontology, classes); List<String> lines = new ArrayList<String>(); for (OWLAxiom axiom : sample) { double accuracy = getAccuracy(axiom); @@ -303,6 +326,95 @@ } } + public void runUsingFragmentExtraction2(SparqlEndpoint endpoint, OWLOntology patternOntology, File outputFile, int maxNrOfTestedClasses){ + ks = new SparqlEndpointKS(endpoint, cache); + SPARQLReasoner reasoner = new SPARQLReasoner(ks, cache); + + //get the axiom patterns to evaluate + List<OWLAxiom> patterns = getPatternsToEvaluate(patternOntology); + + //get all classes in KB + Collection<NamedClass> classes = reasoner.getOWLClasses(); + + //get n random classes which contain at least x instances + int minNrOfInstances = 5; + List<NamedClass> classesList = new ArrayList<NamedClass>(classes); + Collections.shuffle(classesList, new Random(123)); + classes = new TreeSet<NamedClass>(); + for (NamedClass cls : classesList) { + if(!cls.getName().startsWith("http://dbpedia.org/ontology/"))continue; + if (reasoner.getIndividualsCount(cls) >= minNrOfInstances) { + classes.add(cls); + } + if (classes.size() == maxNrOfTestedClasses) { + break; + } + } +// classes = Collections.singleton(new NamedClass("http://dbpedia.org/ontology/BaseballPlayer")); + + //get the maximum modal depth in the pattern axioms + final int maxModalDepth = maxModalDepth(patterns); + + //create cache and fill the cache + fragments = CacheBuilder.newBuilder() + .maximumSize(maxNrOfTestedClasses) + .expireAfterWrite(100, TimeUnit.HOURS) + .build( + new CacheLoader<NamedClass, Model>() { + public Model load(NamedClass cls) { + return extractFragment(cls, maxModalDepth); + } + }); + Model fragment; + for (NamedClass cls : classes) { + try { + fragment = fragments.get(cls); + } catch (ExecutionException e) { + e.printStackTrace(); + } + } + System.exit(0); + + Monitor patternTimeMon = MonitorFactory.getTimeMonitor("pattern-runtime"); + //for each pattern + for (OWLAxiom pattern : patterns) { + patternTimeMon.start(); + //run if not already exists a result on disk + File file = getPatternInstantiationsFile(pattern); + OWLOntology ontology = null; + if(!file.exists()){ + ontology = applyPattern(pattern, classes); + } else { + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + try { + ontology = man.loadOntologyFromOntologyDocument(file); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } + } + patternTimeMon.stop(); + if(sampling){ + + List<OWLAxiom> sample = createSample(ontology, classes); + List<String> lines = new ArrayList<String>(); + for (OWLAxiom axiom : sample) { + double accuracy = getAccuracy(axiom); + lines.add(axiomRenderer.render(axiom) + "," + format.format(accuracy)); + } + try { + Files.write(Joiner.on("\n").join(lines), new File(samplesDir, axiomRenderer.render(pattern).replace(" ", "_") + "-instantiations-sample.csv"), Charsets.UTF_8); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + } + + + private File getPatternInstantiationsFile(OWLAxiom pattern){ + return new File(instantiationsDir, axiomRenderer.render(pattern).replace(" ", "_") + "-instantiations.ttl"); + } + private Set<OWLAxiom> 
asAnnotatedAxioms(Map<OWLAxiom, Score> axioms2Score){ Set<OWLAxiom> annotatedAxioms = new HashSet<OWLAxiom>(); for (Entry<OWLAxiom, Score> entry : axioms2Score.entrySet()) { @@ -326,34 +438,39 @@ } } - private List<OWLAxiom> createSample(OWLOntology ontology){ + private List<OWLAxiom> createSample(OWLOntology ontology, Collection<NamedClass> classes){ + List<OWLAxiom> sample = new ArrayList<OWLAxiom>(); + Set<OWLAxiom> axioms = ontology.getAxioms(); + //filter out trivial axioms, e.g. A SubClassOf Thing or A EquivalentTo A and B filterOutTrivialAxioms(axioms); - for (Iterator<OWLAxiom> iter = axioms.iterator(); iter.hasNext();) { - OWLAxiom axiom = iter.next(); - double accuracy = getAccuracy(axiom); - if(accuracy < sampleThreshold){ - iter.remove(); - } else { - String axiomString = axiomRenderer.render(axiom); - boolean remove = false; - for (String s : entites2Ignore) { - if(axiomString.contains(s)){ - remove = true; - break; + //filter out axioms below threshold + filterOutAxiomsBelowThreshold(axioms, sampleThreshold); + //get for each class some random axioms + int limit = sampleSize / classes.size(); + while(!axioms.isEmpty() && sample.size() < sampleSize){ + for (NamedClass cls : classes) { + List<OWLAxiom> relatedAxioms = new ArrayList<OWLAxiom>(ontology.getReferencingAxioms(df.getOWLClass(IRI.create(cls.getName())))); + relatedAxioms.retainAll(axioms); + Collections.shuffle(relatedAxioms, new Random(123)); + int cnt = 0; + Iterator<OWLAxiom> iter = relatedAxioms.iterator(); + while(iter.hasNext() && cnt < limit){ + OWLAxiom axiom = iter.next(); + if(!sample.contains(axiom)){ + sample.add(axiom); + axioms.remove(axiom); + cnt++; } } - if(remove){ - iter.remove(); - } } } - List<OWLAxiom> axiomList = new ArrayList<OWLAxiom>(axioms); - Collections.shuffle(axiomList, new Random(123)); - return axiomList.subList(0, Math.min(sampleSize, axiomList.size())); + + Collections.shuffle(sample, new Random(123)); + return sample.subList(0, Math.min(sampleSize, sample.size())); } - private List<OWLAxiom> createSample(OWLOntology ontology, Collection<NamedClass> classes){ + private List<OWLAxiom> createSample2(OWLOntology ontology, Collection<NamedClass> classes){ List<OWLAxiom> axiomList = new ArrayList<OWLAxiom>(); for (NamedClass cls : classes) { OWLClass owlClass = df.getOWLClass(IRI.create(cls.getName())); @@ -507,74 +624,92 @@ private Map<NamedClass, Model> extractFragments(Collection<NamedClass> classes, int depth){ Map<NamedClass, Model> class2Fragment = new HashMap<NamedClass, Model>(); - //get the maximum modal depth in the patterns + Model fragment; for (NamedClass cls : classes) { - logger.info("Extracting fragment for " + cls + "..."); - Model fragment = ModelFactory.createDefaultModel(); - //try to load from cache - HashFunction hf = Hashing.md5(); - HashCode hc = hf.newHasher().putString(cls.getName()).hash(); - File file = new File("pattern-cache/" + hc.toString() + ".ttl"); - if(file.exists()){ - try { - fragment.read(new FileInputStream(file), null, "TURTLE"); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } - filterModel(fragment); - class2Fragment.put(cls, fragment); - logger.info("...got " + fragment.size() + " triples."); - continue; - } - - //build the CONSTRUCT query - Query query = buildConstructQuery(cls, depth); - query.setLimit(queryLimit); - //get triples until time elapsed - long startTime = System.currentTimeMillis(); - int offset = 0; - boolean hasMoreResults = true; - while(hasMoreResults && (System.currentTimeMillis() - startTime)<= 
maxFragmentExtractionTime){ - query.setOffset(offset); - logger.info(query); - Model m = executeConstructQuery(query); - fragment.add(m); - if(m.size() == 0){ - hasMoreResults = false; - } - offset += queryLimit; - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - logger.info("...got " + fragment.size() + " triples."); + fragment = extractFragment(cls, depth); + class2Fragment.put(cls, fragment); + } + return class2Fragment; + } + + private Model extractFragment(NamedClass cls, int depth){ + logger.info("Extracting fragment for " + cls + "..."); + Model fragment = ModelFactory.createDefaultModel(); + //try to load from cache + HashFunction hf = Hashing.md5(); + HashCode hc = hf.newHasher().putString(cls.getName()).hash(); + File file = new File("pattern-cache/" + hc.toString() + ".ttl"); + if(file.exists()){ try { - fragment.write(new FileOutputStream(file), "TURTLE"); + fragment.read(new FileInputStream(file), null, "TURTLE"); } catch (FileNotFoundException e) { e.printStackTrace(); } filterModel(fragment); - class2Fragment.put(cls, fragment); + logger.info("...got " + fragment.size() + " triples."); + return fragment; } - return class2Fragment; + + //build the CONSTRUCT query + Query query = buildConstructQuery(cls, depth); + query.setLimit(queryLimit); + //get triples until time elapsed + long startTime = System.currentTimeMillis(); + int offset = 0; + boolean hasMoreResults = true; + while(hasMoreResults && (System.currentTimeMillis() - startTime)<= maxFragmentExtractionTime){ + query.setOffset(offset); + logger.info(query); + Model m = executeConstructQuery(query); + fragment.add(m); + if(m.size() == 0){ + hasMoreResults = false; + } + offset += queryLimit; + try { + Thread.sleep(500); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + logger.info("...got " + fragment.size() + " triples."); + try { + fragment.write(new FileOutputStream(file), "TURTLE"); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + filterModel(fragment); + return fragment; } private void filterModel(Model model){ + Set<String> blackList = Sets.newHashSet( + "http://dbpedia.org/ontology/thumbnail", + "http://dbpedia.org/ontology/wikiPageRedirects", + "http://dbpedia.org/ontology/wikiPageExternalLink", + "http://dbpedia.org/ontology/wikiPageWikiLink", + "http://dbpedia.org/ontology/wikiPageRevisionID", + "http://dbpedia.org/ontology/wikiPageID", + "http://dbpedia.org/ontology/wikiPageDisambiguates", + "http://dbpedia.org/ontology/wikiPageInterLanguageLink", + "http://dbpedia.org/ontology/abstract" + ); List<Statement> statements2Remove = new ArrayList<Statement>(); for (Statement st : model.listStatements().toSet()) { - if(st.getObject().isLiteral()){ - statements2Remove.add(st); + if(st.getPredicate().equals(RDF.type)){ + if(st.getObject().isURIResource() && !st.getObject().asResource().getURI().startsWith("http://dbpedia.org/ontology/")){ + statements2Remove.add(st); + } + } else { + if(!st.getPredicate().getURI().startsWith("http://dbpedia.org/ontology/")){ + statements2Remove.add(st); + } else { + if(blackList.contains(st.getPredicate().getURI())){ + statements2Remove.add(st); + } + } } - if(st.getPredicate().equals(RDF.type) && !st.getObject().asResource().getURI().startsWith("http://dbpedia.org/ontology/")){ - statements2Remove.add(st); - } - if(st.getPredicate().hasURI("http://xmlns.com/foaf/0.1/depiction") || st.getPredicate().hasURI("http://dbpedia.org/ontology/thumbnail")){ - statements2Remove.add(st); - } else 
if(!st.getPredicate().equals(RDF.type) && !st.getPredicate().getURI().startsWith("http://dbpedia.org/ontology/")){ - statements2Remove.add(st); - } + } model.remove(statements2Remove); } @@ -775,7 +910,43 @@ return axioms2Score; } - private Map<OWLAxiom, Score> applyPattern(OWLAxiom pattern, OWLClass cls, Model fragment) { + private OWLOntology applyPattern(OWLAxiom pattern, Collection<NamedClass> classes) { + logger.info("Applying pattern " + pattern + "..."); + Set<OWLAxiom> learnedAxioms = new HashSet<OWLAxiom>(); + Monitor patternClassTimeMon = MonitorFactory.getTimeMonitor("class-pattern-runtime"); + // for each class + for (NamedClass cls : classes) { + logger.info("...on class " + cls + "..."); + try { + OWLClass owlClass = df.getOWLClass(IRI.create(cls.getName())); + + //get the fragment + Model fragment = fragments.get(cls); + + //apply the pattern + patternClassTimeMon.start(); + Set<OWLAxiom> annotatedAxioms = applyPattern(pattern, owlClass, fragment); + patternClassTimeMon.stop(); + + filterOutTrivialAxioms(annotatedAxioms); + filterOutAxiomsBelowThreshold(annotatedAxioms, threshold); + int nrOfAxiomsLocal = annotatedAxioms.size(); + annotatedAxioms = computeScoreGlobal(annotatedAxioms, owlClass); + filterOutAxiomsBelowThreshold(annotatedAxioms, threshold); + int nrOfAxiomsGlobal = annotatedAxioms.size(); + learnedAxioms.addAll(annotatedAxioms); + printAxioms(annotatedAxioms, threshold); + + write2DB(pattern, owlClass, patternClassTimeMon.getLastValue(), nrOfAxiomsLocal, nrOfAxiomsGlobal); + } catch (ExecutionException e) { + e.printStackTrace(); + } + } + OWLOntology ontology = save(pattern, learnedAxioms); + return ontology; + } + + private Set<OWLAxiom> applyPattern(OWLAxiom pattern, OWLClass cls, Model fragment) { Map<OWLAxiom, Score> axioms2Score = new HashMap<OWLAxiom, Score>(); OWLClassExpression patternSubClass = null; @@ -798,7 +969,7 @@ patternSuperClass = ((OWLSubClassOfAxiom) pattern).getSuperClass(); } else { logger.warn("Pattern " + pattern + " not supported yet."); - return axioms2Score; + return asAnnotatedAxioms(axioms2Score); } Set<OWLEntity> signature = patternSuperClass.getSignature(); @@ -851,7 +1022,7 @@ axioms2Score.put(axiom, score); } - return axioms2Score; + return asAnnotatedAxioms(axioms2Score); } private void write2DB(OWLAxiom pattern, OWLClass cls, double runtime, int nrOfAxiomsLocal, int nrOfAxiomsGlobal){ @@ -964,11 +1135,11 @@ return template.asQuery(); } - private OWLOntology save(OWLAxiom pattern, Set<OWLAxiom> learnedAxioms, File file){ + private OWLOntology save(OWLAxiom pattern, Set<OWLAxiom> learnedAxioms){ try { OWLOntologyManager man = OWLManager.createOWLOntologyManager(); OWLOntology ontology = man.createOntology(learnedAxioms); - man.saveOntology(ontology, new TurtleOntologyFormat(), new FileOutputStream(file)); + man.saveOntology(ontology, new TurtleOntologyFormat(), new FileOutputStream(getPatternInstantiationsFile(pattern))); return ontology; } catch (OWLOntologyCreationException e) { e.printStackTrace(); @@ -1133,15 +1304,18 @@ superClass = ((OWLSubClassOfAxiom)axiom).getSuperClass(); } //count subclass+superClass - Query query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(cls, superClass), true);System.out.println(query); + System.out.println("Counting instances of " + df.getOWLObjectIntersectionOf(cls, superClass) + "..."); + Query query = converter.asQuery("?x", df.getOWLObjectIntersectionOf(cls, superClass), true); rs = executeSelectQuery(query); int overlap = rs.next().getLiteral("cnt").getInt(); + 
System.out.println("..." + overlap + " instances."); //count subclass - query = converter.asQuery("?x", cls, true); if(subClassCnt == -1){ - System.out.println(query); + System.out.println("Counting instances of " + cls); + query = converter.asQuery("?x", cls, true); rs = executeSelectQuery(query); subClassCnt = rs.next().getLiteral("cnt").getInt(); + System.out.println("..." + subClassCnt + " instances."); } //compute recall @@ -1152,9 +1326,11 @@ continue; } //count superClass - query = converter.asQuery("?x", superClass, true);System.out.println(query); + System.out.println("Counting instances of " + superClass); + query = converter.asQuery("?x", superClass, true); rs = executeSelectQuery(query); int superClassCnt = rs.next().getLiteral("cnt").getInt(); + System.out.println("..." + superClassCnt + " instances."); //compute precision double precision = wald(superClassCnt, overlap); @@ -1320,7 +1496,7 @@ System.exit(0); } int maxNrOfTestedClasses = (Integer) options.valueOf("limit"); - new OWLAxiomPatternUsageEvaluation().runUsingFragmentExtraction(endpoint, patternsOntology, outputFile, maxNrOfTestedClasses); + new OWLAxiomPatternUsageEvaluation().runUsingFragmentExtraction2(endpoint, patternsOntology, outputFile, maxNrOfTestedClasses); } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2013-06-14 15:02:27
|
Revision: 4000 http://sourceforge.net/p/dl-learner/code/4000 Author: jenslehmann Date: 2013-06-14 15:02:24 +0000 (Fri, 14 Jun 2013) Log Message: ----------- added another semantic for all quantors to fast instance checker Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-14 12:41:49 UTC (rev 3999) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-14 15:02:24 UTC (rev 4000) @@ -139,7 +139,11 @@ "use those which have at least one r-filler and do not have an r-filler not in C.",defaultValue = "standard",propertyEditorClass = StringTrimmerEditor.class) private ForallSemantics forallSemantics = ForallSemantics.Standard; - public enum ForallSemantics { Standard, SomeOnly } + public enum ForallSemantics { + Standard, // standard all quantor + NonEmpty, // p only C for instance a returns false if there is no fact p(a,x) for any x + SomeOnly // p only C for instance a returns false if there is no fact p(a,x) with x \ in C + } /** * Creates an instance of the fast instance checker. @@ -401,6 +405,7 @@ return true; } SortedSet<Individual> roleFillers = opPos.get(op).get(individual); + if (roleFillers == null) { if(forallSemantics == ForallSemantics.Standard) { return true; @@ -408,12 +413,20 @@ return false; } } + boolean hasCorrectFiller = false; for (Individual roleFiller : roleFillers) { - if (!hasTypeImpl(child, roleFiller)) { + if (hasTypeImpl(child, roleFiller)) { + hasCorrectFiller = true; + } else { return false; - } + } } - return true; + + if(forallSemantics == ForallSemantics.SomeOnly) { + return hasCorrectFiller; + } else { + return true; + } } else if (description instanceof ObjectMinCardinalityRestriction) { ObjectPropertyExpression ope = ((ObjectCardinalityRestriction) description).getRole(); if (!(ope instanceof ObjectProperty)) { Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java 2013-06-14 12:41:49 UTC (rev 3999) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java 2013-06-14 15:02:24 UTC (rev 4000) @@ -31,6 +31,7 @@ * */ public class SomeOnlyReasonerTest { + @Test public void someOnlyTest() throws ComponentInitException, LearningProblemUnsupportedException { // TODO: use aksw-commons-sparql instead of sparql-scala This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
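The revised handling of ObjectAllRestriction in r4000 distinguishes three readings of "p only C": Standard (vacuously true when the individual has no p-filler), NonEmpty (false without any p-filler), and SomeOnly (additionally requires at least one filler inside C). A compact restatement of that decision logic, detached from the reasoner internals: individuals and fillers are plain strings here, and the set extensionOfC stands in for the hasTypeImpl(C, x) check.

import java.util.Set;

public class ForallSemanticsDemo {

    enum ForallSemantics { Standard, NonEmpty, SomeOnly }

    /** Evaluates "p only C" for an individual whose set of p-fillers is given (may be null). */
    static boolean holds(Set<String> fillers, Set<String> extensionOfC, ForallSemantics semantics) {
        if (fillers == null || fillers.isEmpty()) {
            // no p-filler at all: vacuously true only under the Standard semantics
            return semantics == ForallSemantics.Standard;
        }
        boolean hasCorrectFiller = false;
        for (String filler : fillers) {
            if (extensionOfC.contains(filler)) {
                hasCorrectFiller = true;
            } else {
                return false;      // a filler outside C falsifies "p only C" under every semantics
            }
        }
        // SomeOnly insists on at least one filler inside C; Standard and NonEmpty are
        // satisfied as soon as no counterexample was found
        return semantics != ForallSemantics.SomeOnly || hasCorrectFiller;
    }
}

For a non-empty filler set whose members all lie in C the three semantics agree; they differ only when the individual has no p-filler at all.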
From: <dc...@us...> - 2013-06-14 12:41:53
|
Revision: 3999 http://sourceforge.net/p/dl-learner/code/3999 Author: dcherix Date: 2013-06-14 12:41:49 +0000 (Fri, 14 Jun 2013) Log Message: ----------- Changed in the Rest api class to use the OWLClassExpressionToSparqlConverter Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java Modified: trunk/interfaces/src/main/java/org/dllearner/server/Rest.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2013-06-14 09:24:13 UTC (rev 3998) +++ trunk/interfaces/src/main/java/org/dllearner/server/Rest.java 2013-06-14 12:41:49 UTC (rev 3999) @@ -9,6 +9,8 @@ import org.dllearner.core.LearningAlgorithm; import org.dllearner.kb.sparql.SparqlQueryDescriptionConvertVisitor; import org.dllearner.learningproblems.EvaluatedDescriptionPosNeg; +import org.dllearner.utilities.owl.OWLAPIConverter; +import org.dllearner.utilities.owl.OWLClassExpressionToSPARQLConverter; import org.json.simple.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -73,7 +75,7 @@ "} "; learningResult.put("success", "1"); - learningResult.put("manchester", manchester); + learningResult.put("manchester", manchester); learningResult.put("kbsyntax", "other syntax"); learningResult.put("sparql", sparql); learningResult.put("accuracy", 1.0); @@ -87,11 +89,12 @@ SparqlQueryDescriptionConvertVisitor sqd = new SparqlQueryDescriptionConvertVisitor(); sqd.setLimit(limit); - - learningResult.put("success", "1"); + OWLClassExpressionToSPARQLConverter sparqlConv = new OWLClassExpressionToSPARQLConverter(); + learningResult.put("success", "1"); learningResult.put("manchester", ed.getDescription().toManchesterSyntaxString(null, null)); learningResult.put("kbsyntax", ed.getDescription().toKBSyntaxString()); - learningResult.put("sparql", sqd.getSparqlQuery(ed.getDescription())); +// learningResult.put("sparql", sqd.getSparqlQuery(ed.getDescription())); + learningResult.put("sparql", sparqlConv.asQuery("?subject", OWLAPIConverter.getOWLAPIDescription(ed.getDescription()))); learningResult.put("accuracy", ed.getAccuracy()); learningResult.put("truePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getCoveredPositives())); learningResult.put("falsePositives", EvaluatedDescriptionPosNeg.getJSONArray(ed.getNotCoveredPositives())); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
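The REST endpoint now obtains its SPARQL query from OWLClassExpressionToSPARQLConverter instead of SparqlQueryDescriptionConvertVisitor. A minimal usage sketch of that conversion path, relying only on the no-argument constructor and the asQuery(rootVariable, classExpression) call visible in the diff; the example class expression (dbo:Person and birthPlace some dbo:City) is made up for illustration.

import org.dllearner.utilities.owl.OWLClassExpressionToSPARQLConverter;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDataFactory;

public class ToSparqlDemo {
    public static void main(String[] args) {
        OWLDataFactory df = OWLManager.getOWLDataFactory();
        // illustrative class expression: Person and (birthPlace some City)
        OWLClassExpression ce = df.getOWLObjectIntersectionOf(
                df.getOWLClass(IRI.create("http://dbpedia.org/ontology/Person")),
                df.getOWLObjectSomeValuesFrom(
                        df.getOWLObjectProperty(IRI.create("http://dbpedia.org/ontology/birthPlace")),
                        df.getOWLClass(IRI.create("http://dbpedia.org/ontology/City"))));

        OWLClassExpressionToSPARQLConverter converter = new OWLClassExpressionToSPARQLConverter();
        // same call shape as in Rest.java: root variable plus the OWL API class expression
        System.out.println(converter.asQuery("?subject", ce));
    }
}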
From: <dc...@us...> - 2013-06-14 09:24:15
|
Revision: 3998 http://sourceforge.net/p/dl-learner/code/3998 Author: dcherix Date: 2013-06-14 09:24:13 +0000 (Fri, 14 Jun 2013) Log Message: ----------- Test for the ForallSemantics property of the FastInstanceChecker Added Paths: ----------- trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java Added: trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java (rev 0) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java 2013-06-14 09:24:13 UTC (rev 3998) @@ -0,0 +1,116 @@ +/** + * + */ +package org.dllearner.test.junit; + +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.dllearner.algorithms.celoe.CELOE; +import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.ComponentManager; +import org.dllearner.core.LearningProblemUnsupportedException; +import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.ObjectAllRestriction; +import org.dllearner.kb.sparql.simple.SparqlSimpleExtractor; +import org.dllearner.learningproblems.PosNegLPStandard; +import org.dllearner.reasoning.FastInstanceChecker; +import org.dllearner.reasoning.FastInstanceChecker.ForallSemantics; +import org.dllearner.refinementoperators.RhoDRDown; +import org.dllearner.utilities.datastructures.Datastructures; +import org.dllearner.utilities.datastructures.SortedSetTuple; +import org.junit.Test; + +/** + * @author didiers + * + */ +public class SomeOnlyReasonerTest { + @Test + public void someOnlyTest() throws ComponentInitException, LearningProblemUnsupportedException { + // TODO: use aksw-commons-sparql instead of sparql-scala + + SortedSet<Individual> posExamples = new TreeSet<Individual>(); + posExamples.add(new Individual("http://dbpedia.org/resource/Archytas")); + posExamples.add(new Individual("http://dbpedia.org/resource/Pythagoras")); + posExamples.add(new Individual("http://dbpedia.org/resource/Philolaus")); + + SortedSet<Individual> negExamples = new TreeSet<Individual>(); + negExamples.add(new Individual("http://dbpedia.org/resource/Democritus")); + negExamples.add(new Individual("http://dbpedia.org/resource/Zeno_of_Elea")); + negExamples.add(new Individual("http://dbpedia.org/resource/Plato")); + negExamples.add(new Individual("http://dbpedia.org/resource/Socrates")); + + SortedSetTuple<Individual> examples = new SortedSetTuple<Individual>(posExamples, + negExamples); + + ComponentManager cm = ComponentManager.getInstance(); + + SparqlSimpleExtractor ks = cm.knowledgeSource(SparqlSimpleExtractor.class); + ks.setInstances(new ArrayList<String>(Datastructures.individualSetToStringSet(examples + .getCompleteSet()))); + // ks.getConfigurator().setPredefinedEndpoint("DBPEDIA"); // TODO: + // probably the official endpoint is too slow? 
+ ks.setEndpointURL("http://dbpedia.org/sparql"); + // ks.setUseLits(false); + // ks.setUseCacheDatabase(true); + ks.setRecursionDepth(1); + ArrayList<String> ontologyUrls = new ArrayList<String>(); + ontologyUrls.add("http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl"); + ks.setOntologySchemaUrls(ontologyUrls); + ks.setAboxfilter("FILTER ( !isLiteral(?o) && regex(str(?o), " + + "'^http://dbpedia.org/resource/') && " + + "! regex(str(?o), '^http://dbpedia.org/resource/Category') ) "); + ks.setTboxfilter("FILTER ( regex(str(?class), '^http://dbpedia.org/ontology/') ) . "); + + ks.init(); + + AbstractReasonerComponent rc = cm.reasoner(FastInstanceChecker.class, ks); + ((FastInstanceChecker)rc).setForAllSemantics(ForallSemantics.SomeOnly); + rc.init(); + + + PosNegLPStandard lp = cm.learningProblem(PosNegLPStandard.class, rc); + lp.setPositiveExamples(posExamples); + lp.setNegativeExamples(negExamples); + lp.setAccuracyMethod("fmeasure"); + lp.setUseApproximations(false); + lp.init(); + + CELOE la = cm.learningAlgorithm(CELOE.class, lp, rc); + // CELOEConfigurator cc = la.getConfigurator(); + la.setMaxExecutionTimeInSeconds(100); + la.init(); + RhoDRDown op = (RhoDRDown) la.getOperator(); + + op.setUseNegation(false); + op.setUseAllConstructor(true); + op.setUseCardinalityRestrictions(false); + op.setUseHasValueConstructor(true); + la.setNoisePercentage(20); + la.init(); + la.start(); + + cm.freeAllComponents(); + Description desc = la.getCurrentlyBestDescription(); + assertTrue( this.containsObjectAllRestriction(desc)); + + } + + private boolean containsObjectAllRestriction(Description d){ + if(d instanceof ObjectAllRestriction){ + return false; + } + for(Description child:d.getChildren()){ + if(!this.containsObjectAllRestriction(child)){ + return false; + } + } + return true; + } +} Property changes on: trunk/components-core/src/test/java/org/dllearner/test/junit/SomeOnlyReasonerTest.java ___________________________________________________________________ Added: svn:mime-type ## -0,0 +1 ## +text/plain \ No newline at end of property This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dc...@us...> - 2013-06-14 05:12:17
|
Revision: 3997 http://sourceforge.net/p/dl-learner/code/3997 Author: dcherix Date: 2013-06-14 05:12:12 +0000 (Fri, 14 Jun 2013) Log Message: ----------- sparql for ObjectAllRestriction added Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQueryDescriptionConvertVisitor.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQueryDescriptionConvertVisitor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQueryDescriptionConvertVisitor.java 2013-06-13 13:36:48 UTC (rev 3996) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQueryDescriptionConvertVisitor.java 2013-06-14 05:12:12 UTC (rev 3997) @@ -69,613 +69,628 @@ * */ public class SparqlQueryDescriptionConvertVisitor implements DescriptionVisitor { - - - private static Logger logger = Logger.getLogger(ComponentManager.class); - - private int limit = 5; - private int offset = -1; - private boolean labels = false; - private boolean distinct = false; - private boolean count = false; - private String customFilter = null; - - - private SortedSet<String> transitiveProperties =null; - private Map<String,Set<String>> subclassMap = null; - - private Stack<String> stack = new Stack<String>(); - private String query = ""; - private int currentObject = 0; - private List<String> foundNamedClasses = new ArrayList<String>(); - - /** - * resets internal variables - */ - private void reset(){ - currentObject = 0; - stack = new Stack<String>(); - stack.push("subject"); - query = ""; - foundNamedClasses = new ArrayList<String>() ; - } - - public SparqlQueryDescriptionConvertVisitor() { - stack.push("subject"); - } - - /** - * @param descriptionKBSyntax description which is parsed and passed to getSparqlQuery( Description description) - * @return - * @throws ParseException - */ - public String getSparqlQuery( String descriptionKBSyntax) throws ParseException { - Description description = KBParser.parseConcept(descriptionKBSyntax); - return getSparqlQuery( description); - } - - /** - * takes a description and transforms it into SPARQL - * @param description - * @return - */ - public String getSparqlQuery( Description description) { - description.accept(this); - expandSubclasses(); - String ret = ""; - String customFilterTmp = pointalize(customFilter); - query = pointalize(query); - if(count){ - ret = "SELECT count(distinct(?subject)) as ?count { "+ query + " \n"+customFilterTmp+" \n } " ; - }else{ - ret = "SELECT "+distinct()+"?subject "+((labels)?"?label":"")+" { "+labels()+ query + " \n"+customFilterTmp+"\n } " + limit(); - } - reset(); - return ret; - } - - - /** - * finalizes the patterns with a point - * @param toBePointed - * @return - */ - private static String pointalize(String toBePointed){ - if(toBePointed==null){ - return ""; - } - return (toBePointed.trim().endsWith("."))?toBePointed:toBePointed+" . "; - } - - private void expandSubclasses(){ - if(subclassMap == null){ - return; - } - int counter = 0; - int index = 0; - String var = ""; - String uriPattern = ""; - StringBuffer tmp ; - StringBuffer filter = new StringBuffer() ; - Set<String> subClasses; - for(String nc: foundNamedClasses){ - index = query.indexOf("<"+nc+">"); - subClasses = subclassMap.get(nc); - if(index == -1){ - logger.error("named class was found before, but is not in query any more?? 
"+nc); - }else if(subClasses != null){ - var = "?expanded"+counter; - uriPattern = "<"+nc+">"; - tmp = new StringBuffer(); - tmp.append(query.substring(0, index)); - tmp.append(var); - tmp.append(query.substring(index+(uriPattern.length()))); - query = tmp.toString(); - filter.append(makeFilter(var, subClasses, nc)); - }else{ - logger.debug("no mapping found ("+nc+") "+this.getClass().getSimpleName()); - } - - counter++; - } - query += filter.toString(); - } - - private String makeFilter(String var, Set<String> classes, String superClassUri){ - StringBuffer buf = new StringBuffer("\nFILTER ( "+var+" IN ( "); - for (String string : classes) { - buf.append("<"+string+">, "); - } - buf.append("<"+superClassUri+"> ) ). "); - return buf.toString(); - } - - private String limit() { - if (limit > 0 && offset > 0){ - return " LIMIT " + limit + " OFFSET "+offset+" "; - }else if(limit > 0 ){ - return " LIMIT " + limit + " "; - }else { - return ""; - } - } - private String labels() { - return (labels)?"\n?subject rdfs:label ?label . ":""; - } - private String distinct() { - return (distinct)?"DISTINCT ":""; - } - - /** - * @param limit <= 0 means no limit - */ - public void setLimit(int limit) { - this.limit = limit; - } - - public void noLimit() { - this.limit = -1; - } - - /** - * also retrieve labels (untested) - * - * @param labels - */ - public void setLabels(boolean labels) { - this.labels = labels; - } - - /** - * result is distinct - * @param distinct - */ - public void setDistinct(boolean distinct) { - this.distinct = distinct; - } - - /** - * virtuoso optimisation for transitive properties - * @param transitiveProperties - */ - public void setTransitiveProperties(SortedSet<String> transitiveProperties) { - this.transitiveProperties = transitiveProperties; - } - - - /** - * needed for expanding subclasses, if store does no reasoning - * @param subclassMap - */ - public void setSubclassMap(Map<String, Set<String>> subclassMap) { - this.subclassMap = subclassMap; - } - - public void setCount(boolean count) { - this.count = count; - } - - public void setOffset(int offset) { - this.offset = offset; - } - - public void setCustomFilter(String customFilter) { - this.customFilter = customFilter; - } - - public static String getSparqlQuery(String descriptionKBSyntax, int limit, boolean labels, boolean distinct) throws ParseException { - Description d = KBParser.parseConcept(descriptionKBSyntax); - return getSparqlQuery(d, limit, labels, distinct); - } - - public static String getSparqlQuery(Description description, int limit, boolean labels, boolean distinct) { - SparqlQueryDescriptionConvertVisitor visitor = new SparqlQueryDescriptionConvertVisitor(); - visitor.setDistinct(distinct); - visitor.setLabels(labels); - visitor.setLimit(limit); - return visitor.getSparqlQuery(description); - } - - /** - * COMMENT: write some more includes subclasses, costly function, because - * subclasses have to be received first. 
TODO mentioned method cannot be - * found by Javadoc tool conceptRewrite(String descriptionKBSyntax, - * SparqlEndpoint se, Cache c, boolean simple) - * - * @param descriptionKBSyntax - * @see #getSparqlQuery(Description description, int limit) - * @param resultLimit - * @see #getSparqlQuery(Description description, int limit) - * @param maxDepth - * @throws ParseException - */ - - public static String getSparqlQueryIncludingSubclasses(String descriptionKBSyntax, int resultLimit, - SPARQLTasks st, int maxDepth) throws ParseException { - String rewritten = SparqlQueryDescriptionConvertRDFS - .conceptRewrite(descriptionKBSyntax, st, maxDepth); - - return getSparqlQuery(rewritten, resultLimit, false, false); - - } - - - public static void testHasValue() throws Exception{ -// String ttt = "(\"http://dbpedia.org/ontology/Plant\" AND (\"http://dbpedia.org/ontology/kingdom\" hasValue \"http://dbpedia.org/resource/Plantae\" OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; - String ttt = "(\"http://dbpedia.org/ontology/Plant\" AND ((\"http://dbpedia.org/ontology/kingdom\" HASVALUE \"http://dbpedia.org/resource/Plantae\") OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; - SparqlQueryDescriptionConvertVisitor testVisitor = new SparqlQueryDescriptionConvertVisitor(); - String q = testVisitor.getSparqlQuery(ttt); - System.out.println(q); - Description description = KBParser.parseConcept(ttt); - System.out.println(description.toString()); - System.out.println(description.toKBSyntaxString()); - System.out.println(description.toKBSyntaxString(null,null)); - if (true) { - System.exit(0); - } - - } - public static void testTrans() throws Exception{ -// String ttt = "(\"http://dbpedia.org/ontology/Plant\" AND (\"http://dbpedia.org/ontology/kingdom\" hasValue \"http://dbpedia.org/resource/Plantae\" OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; -// String ttt = "(\"http://dbpedia.org/ontology/Plant\" AND ((\"http://dbpedia.org/ontology/kingdom\" HASVALUE \"http://dbpedia.org/resource/Plantae\") OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; - String ttt = "EXISTS \"http://dbpedia.org/ontology/kingdom\".\"http://dbpedia.org/resource/Plantae\""; - SparqlQueryDescriptionConvertVisitor testVisitor = new SparqlQueryDescriptionConvertVisitor(); - testVisitor.setTransitiveProperties(new TreeSet<String>(Arrays.asList(new String[]{"http://dbpedia.org/ontology/kingdom" }))); - String q = testVisitor.getSparqlQuery(ttt); - System.out.println(q); - Description description = KBParser.parseConcept(ttt); - System.out.println(description.toString()); - System.out.println(description.toKBSyntaxString()); - System.out.println(description.toKBSyntaxString(null,null)); - if (true) { - System.exit(0); - } - - } - - /** - * Used for testing the Sparql Query converter. 
- * - * @param args - */ - public static void main(String[] args) throws Exception{ - - try { -// testTrans(); - testHasValue(); - - - //SparqlQueryConverter.test(); - - SortedSet<String> s = new TreeSet<String>(); - HashMap<String, String> result = new HashMap<String, String>(); - HashMap<String, String> subclassMap = new HashMap<String, String>(); - subclassMap.put("http://nlp2rdf.org/ontology/Sentence","<http://nlp2rdf.org/ontology/Subsentence>"); - String conj = "(\"http://dbpedia.org/class/yago/Person100007846\" AND \"http://dbpedia.org/class/yago/Head110162991\")"; - - s.add("EXISTS \"http://dbpedia.org/property/disambiguates\".TOP"); - s.add("EXISTS \"http://dbpedia.org/property/successor\".\"http://dbpedia.org/class/yago/Person100007846\""); - s.add("EXISTS \"http://dbpedia.org/property/successor\"." + conj); - s.add("ALL \"http://dbpedia.org/property/disambiguates\".TOP"); - s.add("ALL \"http://dbpedia.org/property/successor\".\"http://dbpedia.org/class/yago/Person100007846\""); - s.add("\"http://dbpedia.org/class/yago/Person100007846\""); - s.add(conj); - s.add("(\"http://dbpedia.org/class/yago/Person100007846\" OR \"http://dbpedia.org/class/yago/Head110162991\")"); - s.add("NOT \"http://dbpedia.org/class/yago/Person100007846\""); - s.add("(\"http://dbpedia.org/class/yago/HeadOfState110164747\" AND (\"http://dbpedia.org/class/yago/Negotiator110351874\" AND \"http://dbpedia.org/class/yago/Representative110522035\"))"); - - s.clear(); -// s.add("(\"http://nlp2rdf.org/ontology/Sentence\" AND (EXISTS \"http://nlp2rdf.org/ontology/syntaxTreeHasPart\".\"http://nachhalt.sfb632.uni-potsdam.de/owl/stts.owl#Pronoun\" AND EXISTS \"http://nlp2rdf.org/ontology/syntaxTreeHasPart\".\"http://nlp2rdf.org/ontology/sentencefinalpunctuation_tag\"))"); -// s.add("(\"http://nlp2rdf.org/ontology/Sentence\" AND (\"http://nlp2rdf.org/ontology/hasLemma\" VALUE \"test\" )"); - - String prefix = "http://nlp2rdf.org/ontology/"; - String test = "(\"Sentence\" AND (EXISTS \"syntaxTreeHasPart\".\"VVPP\" AND EXISTS \"syntaxTreeHasPart\".(\"stts:AuxilliaryVerb\" AND \"hasLemma\" = werden)))"; - - ObjectProperty stp = new ObjectProperty(prefix+"syntaxTreeHasPart"); - DatatypeProperty dtp = new DatatypeProperty(prefix+"hasLemma"); - StringValueRestriction svr = new StringValueRestriction(dtp,"werden" ); - Intersection inner = new Intersection(new NamedClass(prefix+"Auxillary"), svr); - Intersection middle = new Intersection( - new ObjectSomeRestriction(stp, new NamedClass(prefix+"VVPP")), - new ObjectSomeRestriction(stp, inner)); - Intersection outer = new Intersection( - new NamedClass(prefix+"Sentence"), - middle - ); - - System.out.println(outer.toKBSyntaxString(null,null)); - System.out.println(test); - - Map<String, Set<String>> testMap = new HashMap<String, Set<String>>(); - testMap.put(prefix+"Sentence", new HashSet<String>(Arrays.asList(new String[]{"whatever","loser"}))); -// s.add(outer.toKBSyntaxString(null,null)); - SparqlQueryDescriptionConvertVisitor testVisitor = new SparqlQueryDescriptionConvertVisitor(); - testVisitor.setSubclassMap(testMap); - String q = testVisitor.getSparqlQuery(outer.toKBSyntaxString()); - System.out.println(q); - if (true) { - System.exit(0); - } -// <http://nlp2rdf.org/ontology/sentencefinalpunctuation_tag> - String query = ""; - SparqlQueryDescriptionConvertVisitor visit = new SparqlQueryDescriptionConvertVisitor(); - visit.setLabels(false); - visit.setDistinct(false); -// visit.setClassToSubclassesVirtuoso(subclassMap); - - - - for (String kbsyntax : s) { - query = 
visit.getSparqlQuery(kbsyntax); - result.put(kbsyntax, query); - } - System.out.println("************************"); - for (String string : result.keySet()) { - System.out.println("KBSyntayString: " + string); - System.out.println("Query:\n" + result.get(string)); - System.out.println("************************"); - } - System.out.println("Finished"); - } catch (ParseException e) { - e.printStackTrace(); - } - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .Negation) - */ - public void visit(Negation description) { - logger.trace("Negation"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .ObjectAllRestriction) - */ - public void visit(ObjectAllRestriction description) { - logger.trace("ObjectAllRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .ObjectSomeRestriction) - */ - public void visit(ObjectSomeRestriction description) { - logger.trace("ObjectSomeRestriction"); - String option = ""; - if(transitiveProperties!= null && transitiveProperties.contains(description.getRole().toString()) ){ - option =" OPTION (TRANSITIVE , t_in(?" + stack.peek()+"), t_out(?object" + currentObject + "), T_MIN(0), T_MAX(6), T_DIRECTION 1 , T_NO_CYCLES) "; - } - - if(description.getChild(0) instanceof Thing){ - //I removed a point here at the end - query += "\n?" + stack.peek() + " <" + description.getRole() + "> [] " + option + " "; - }else{ - query += "\n?" + stack.peek() + " <" + description.getRole() + "> ?object" + currentObject + option + " . "; - stack.push("object" + currentObject); - currentObject++; - description.getChild(0).accept(this); - stack.pop(); - } - - logger.trace(description.getRole().toString()); - logger.trace(description.getChild(0).toString()); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .Nothing) - */ - public void visit(Nothing description) { - logger.trace("Nothing"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .Thing) - */ - public void visit(Thing description) { - logger.trace("Thing"); - - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .Intersection) - */ - public void visit(Intersection description) { - if(description.getChild(0) instanceof Thing ){ - logger.trace("Intersection with TOP"); - description.getChild(1).accept(this); - }else if(description.getChild(1) instanceof Thing ){ - logger.trace("Intersection with TOP"); - description.getChild(0).accept(this); - }else{ - logger.trace("Intersection"); - description.getChild(0).accept(this); - query += ". 
"; - description.getChild(1).accept(this); - } - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .Union) - */ - public void visit(Union description) { - - if(description.getChild(0) instanceof Thing ){ - logger.trace("Union with TOP"); - description.getChild(1).accept(this); - }else if(description.getChild(1) instanceof Thing ){ - logger.trace("Union with TOP"); - description.getChild(0).accept(this); - }else{ - logger.trace("Union"); - query += "{"; - description.getChild(0).accept(this); - query += "} UNION {"; - description.getChild(1).accept(this); - query += "}"; - } - - - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .ObjectMinCardinalityRestriction) - */ - public void visit(ObjectMinCardinalityRestriction description) { - logger.trace("ObjectMinCardinalityRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .ObjectExactCardinalityRestriction) - */ - public void visit(ObjectExactCardinalityRestriction description) { - logger.trace("ObjectExactCardinalityRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .ObjectMaxCardinalityRestriction) - */ - public void visit(ObjectMaxCardinalityRestriction description) { - logger.trace("ObjectMaxCardinalityRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .ObjectValueRestriction) - */ - public void visit(ObjectValueRestriction description) { - ObjectProperty op = (ObjectProperty) description.getRestrictedPropertyExpression(); - Individual ind = description.getIndividual(); - query += "\n?" + stack.peek() + " <" + op.getName() + "> <" + ind.getName() + "> "; - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .DatatypeValueRestriction) - */ - public void visit(DatatypeValueRestriction description) { - logger.trace("DatatypeValueRestriction"); - query += "\n?" + stack.peek() + " <" + description.getRestrictedPropertyExpression() + "> \""+description.getValue().getLiteral()+"\" "; - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .NamedClass) - */ - public void visit(NamedClass description) { - logger.trace("NamedClass"); - query += "\n?" 
+ stack.peek() + " a <" + description.getName() + "> "; - foundNamedClasses.add(description.getName()); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.algorithms - * .gp.ADC) - */ - public void visit(ADC description) { - logger.trace("ADC"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .DatatypeMinCardinalityRestriction) - */ - public void visit(DatatypeMinCardinalityRestriction description) { - logger.trace("DatatypeMinCardinalityRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .DatatypeExactCardinalityRestriction) - */ - public void visit(DatatypeExactCardinalityRestriction description) { - logger.trace("DatatypeExactCardinalityRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .DatatypeMaxCardinalityRestriction) - */ - public void visit(DatatypeMaxCardinalityRestriction description) { - logger.trace("DatatypeMaxCardinalityRestriction"); - } - - /* - * (non-Javadoc) - * - * @see - * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl - * .DatatypeSomeRestriction) - */ - public void visit(DatatypeSomeRestriction description) { - logger.trace("DatatypeSomeRestriction"); - } - - @Override - public void visit(ObjectOneOf description) { - logger.trace("ObjectOneOf"); - - } - - - + + private static Logger logger = Logger.getLogger(ComponentManager.class); + + private int limit = 5; + private int offset = -1; + private boolean labels = false; + private boolean distinct = false; + private boolean count = false; + private String customFilter = null; + + private SortedSet<String> transitiveProperties = null; + private Map<String, Set<String>> subclassMap = null; + + private Stack<String> stack = new Stack<String>(); + private String query = ""; + private int currentObject = 0; + private List<String> foundNamedClasses = new ArrayList<String>(); + + /** + * resets internal variables + */ + private void reset() { + currentObject = 0; + stack = new Stack<String>(); + stack.push("subject"); + query = ""; + foundNamedClasses = new ArrayList<String>(); + } + + public SparqlQueryDescriptionConvertVisitor() { + stack.push("subject"); + } + + /** + * @param descriptionKBSyntax + * description which is parsed and passed to getSparqlQuery( + * Description description) + * @return + * @throws ParseException + */ + public String getSparqlQuery(String descriptionKBSyntax) throws ParseException { + Description description = KBParser.parseConcept(descriptionKBSyntax); + return getSparqlQuery(description); + } + + /** + * takes a description and transforms it into SPARQL + * + * @param description + * @return + */ + public String getSparqlQuery(Description description) { + description.accept(this); + expandSubclasses(); + String ret = ""; + String customFilterTmp = pointalize(customFilter); + query = pointalize(query); + if (count) { + ret = "SELECT count(distinct(?subject)) as ?count { " + query + " \n" + + customFilterTmp + " \n } "; + } else { + ret = "SELECT " + distinct() + "?subject " + ((labels) ? 
"?label" : "") + " { " + + labels() + query + " \n" + customFilterTmp + "\n } " + limit(); + } + reset(); + return ret; + } + + /** + * finalizes the patterns with a point + * + * @param toBePointed + * @return + */ + private static String pointalize(String toBePointed) { + if (toBePointed == null) { + return ""; + } + return (toBePointed.trim().endsWith(".")) ? toBePointed : toBePointed + " . "; + } + + private void expandSubclasses() { + if (subclassMap == null) { + return; + } + int counter = 0; + int index = 0; + String var = ""; + String uriPattern = ""; + StringBuffer tmp; + StringBuffer filter = new StringBuffer(); + Set<String> subClasses; + for (String nc : foundNamedClasses) { + index = query.indexOf("<" + nc + ">"); + subClasses = subclassMap.get(nc); + if (index == -1) { + logger.error("named class was found before, but is not in query any more?? " + nc); + } else if (subClasses != null) { + var = "?expanded" + counter; + uriPattern = "<" + nc + ">"; + tmp = new StringBuffer(); + tmp.append(query.substring(0, index)); + tmp.append(var); + tmp.append(query.substring(index + (uriPattern.length()))); + query = tmp.toString(); + filter.append(makeFilter(var, subClasses, nc)); + } else { + logger.debug("no mapping found (" + nc + ") " + this.getClass().getSimpleName()); + } + + counter++; + } + query += filter.toString(); + } + + private String makeFilter(String var, Set<String> classes, String superClassUri) { + StringBuffer buf = new StringBuffer("\nFILTER ( " + var + " IN ( "); + for (String string : classes) { + buf.append("<" + string + ">, "); + } + buf.append("<" + superClassUri + "> ) ). "); + return buf.toString(); + } + + private String limit() { + if (limit > 0 && offset > 0) { + return " LIMIT " + limit + " OFFSET " + offset + " "; + } else if (limit > 0) { + return " LIMIT " + limit + " "; + } else { + return ""; + } + } + + private String labels() { + return (labels) ? "\n?subject rdfs:label ?label . " : ""; + } + + private String distinct() { + return (distinct) ? 
"DISTINCT " : ""; + } + + /** + * @param limit + * <= 0 means no limit + */ + public void setLimit(int limit) { + this.limit = limit; + } + + public void noLimit() { + this.limit = -1; + } + + /** + * also retrieve labels (untested) + * + * @param labels + */ + public void setLabels(boolean labels) { + this.labels = labels; + } + + /** + * result is distinct + * + * @param distinct + */ + public void setDistinct(boolean distinct) { + this.distinct = distinct; + } + + /** + * virtuoso optimisation for transitive properties + * + * @param transitiveProperties + */ + public void setTransitiveProperties(SortedSet<String> transitiveProperties) { + this.transitiveProperties = transitiveProperties; + } + + /** + * needed for expanding subclasses, if store does no reasoning + * + * @param subclassMap + */ + public void setSubclassMap(Map<String, Set<String>> subclassMap) { + this.subclassMap = subclassMap; + } + + public void setCount(boolean count) { + this.count = count; + } + + public void setOffset(int offset) { + this.offset = offset; + } + + public void setCustomFilter(String customFilter) { + this.customFilter = customFilter; + } + + public static String getSparqlQuery(String descriptionKBSyntax, int limit, boolean labels, + boolean distinct) throws ParseException { + Description d = KBParser.parseConcept(descriptionKBSyntax); + return getSparqlQuery(d, limit, labels, distinct); + } + + public static String getSparqlQuery(Description description, int limit, boolean labels, + boolean distinct) { + SparqlQueryDescriptionConvertVisitor visitor = new SparqlQueryDescriptionConvertVisitor(); + visitor.setDistinct(distinct); + visitor.setLabels(labels); + visitor.setLimit(limit); + return visitor.getSparqlQuery(description); + } + + /** + * COMMENT: write some more includes subclasses, costly function, because + * subclasses have to be received first. 
TODO mentioned method cannot be + * found by Javadoc tool conceptRewrite(String descriptionKBSyntax, + * SparqlEndpoint se, Cache c, boolean simple) + * + * @param descriptionKBSyntax + * @see #getSparqlQuery(Description description, int limit) + * @param resultLimit + * @see #getSparqlQuery(Description description, int limit) + * @param maxDepth + * @throws ParseException + */ + + public static String getSparqlQueryIncludingSubclasses(String descriptionKBSyntax, + int resultLimit, SPARQLTasks st, int maxDepth) throws ParseException { + String rewritten = SparqlQueryDescriptionConvertRDFS.conceptRewrite(descriptionKBSyntax, + st, maxDepth); + + return getSparqlQuery(rewritten, resultLimit, false, false); + + } + + public static void testHasValue() throws Exception { + // String ttt = + // "(\"http://dbpedia.org/ontology/Plant\" AND (\"http://dbpedia.org/ontology/kingdom\" hasValue \"http://dbpedia.org/resource/Plantae\" OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; + String ttt = "(\"http://dbpedia.org/ontology/Plant\" AND ((\"http://dbpedia.org/ontology/kingdom\" HASVALUE \"http://dbpedia.org/resource/Plantae\") OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; + SparqlQueryDescriptionConvertVisitor testVisitor = new SparqlQueryDescriptionConvertVisitor(); + String q = testVisitor.getSparqlQuery(ttt); + System.out.println(q); + Description description = KBParser.parseConcept(ttt); + System.out.println(description.toString()); + System.out.println(description.toKBSyntaxString()); + System.out.println(description.toKBSyntaxString(null, null)); + if (true) { + System.exit(0); + } + + } + + public static void testTrans() throws Exception { + // String ttt = + // "(\"http://dbpedia.org/ontology/Plant\" AND (\"http://dbpedia.org/ontology/kingdom\" hasValue \"http://dbpedia.org/resource/Plantae\" OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; + // String ttt = + // "(\"http://dbpedia.org/ontology/Plant\" AND ((\"http://dbpedia.org/ontology/kingdom\" HASVALUE \"http://dbpedia.org/resource/Plantae\") OR EXISTS \"http://dbpedia.org/ontology/family\".\"http://dbpedia.org/ontology/FloweringPlant\"))"; + String ttt = "EXISTS \"http://dbpedia.org/ontology/kingdom\".\"http://dbpedia.org/resource/Plantae\""; + SparqlQueryDescriptionConvertVisitor testVisitor = new SparqlQueryDescriptionConvertVisitor(); + testVisitor.setTransitiveProperties(new TreeSet<String>(Arrays + .asList(new String[] { "http://dbpedia.org/ontology/kingdom" }))); + String q = testVisitor.getSparqlQuery(ttt); + System.out.println(q); + Description description = KBParser.parseConcept(ttt); + System.out.println(description.toString()); + System.out.println(description.toKBSyntaxString()); + System.out.println(description.toKBSyntaxString(null, null)); + if (true) { + System.exit(0); + } + + } + + /** + * Used for testing the Sparql Query converter. 
+ * + * @param args + */ + public static void main(String[] args) throws Exception { + + try { + // testTrans(); + testHasValue(); + + // SparqlQueryConverter.test(); + + SortedSet<String> s = new TreeSet<String>(); + HashMap<String, String> result = new HashMap<String, String>(); + HashMap<String, String> subclassMap = new HashMap<String, String>(); + subclassMap.put("http://nlp2rdf.org/ontology/Sentence", + "<http://nlp2rdf.org/ontology/Subsentence>"); + String conj = "(\"http://dbpedia.org/class/yago/Person100007846\" AND \"http://dbpedia.org/class/yago/Head110162991\")"; + + s.add("EXISTS \"http://dbpedia.org/property/disambiguates\".TOP"); + s.add("EXISTS \"http://dbpedia.org/property/successor\".\"http://dbpedia.org/class/yago/Person100007846\""); + s.add("EXISTS \"http://dbpedia.org/property/successor\"." + conj); + s.add("ALL \"http://dbpedia.org/property/disambiguates\".TOP"); + s.add("ALL \"http://dbpedia.org/property/successor\".\"http://dbpedia.org/class/yago/Person100007846\""); + s.add("\"http://dbpedia.org/class/yago/Person100007846\""); + s.add(conj); + s.add("(\"http://dbpedia.org/class/yago/Person100007846\" OR \"http://dbpedia.org/class/yago/Head110162991\")"); + s.add("NOT \"http://dbpedia.org/class/yago/Person100007846\""); + s.add("(\"http://dbpedia.org/class/yago/HeadOfState110164747\" AND (\"http://dbpedia.org/class/yago/Negotiator110351874\" AND \"http://dbpedia.org/class/yago/Representative110522035\"))"); + + s.clear(); + // s.add("(\"http://nlp2rdf.org/ontology/Sentence\" AND (EXISTS \"http://nlp2rdf.org/ontology/syntaxTreeHasPart\".\"http://nachhalt.sfb632.uni-potsdam.de/owl/stts.owl#Pronoun\" AND EXISTS \"http://nlp2rdf.org/ontology/syntaxTreeHasPart\".\"http://nlp2rdf.org/ontology/sentencefinalpunctuation_tag\"))"); + // s.add("(\"http://nlp2rdf.org/ontology/Sentence\" AND (\"http://nlp2rdf.org/ontology/hasLemma\" VALUE \"test\" )"); + + String prefix = "http://nlp2rdf.org/ontology/"; + String test = "(\"Sentence\" AND (EXISTS \"syntaxTreeHasPart\".\"VVPP\" AND EXISTS \"syntaxTreeHasPart\".(\"stts:AuxilliaryVerb\" AND \"hasLemma\" = werden)))"; + + ObjectProperty stp = new ObjectProperty(prefix + "syntaxTreeHasPart"); + DatatypeProperty dtp = new DatatypeProperty(prefix + "hasLemma"); + StringValueRestriction svr = new StringValueRestriction(dtp, "werden"); + Intersection inner = new Intersection(new NamedClass(prefix + "Auxillary"), svr); + Intersection middle = new Intersection(new ObjectSomeRestriction(stp, new NamedClass( + prefix + "VVPP")), new ObjectSomeRestriction(stp, inner)); + Intersection outer = new Intersection(new NamedClass(prefix + "Sentence"), middle); + + System.out.println(outer.toKBSyntaxString(null, null)); + System.out.println(test); + + Map<String, Set<String>> testMap = new HashMap<String, Set<String>>(); + testMap.put(prefix + "Sentence", + new HashSet<String>(Arrays.asList(new String[] { "whatever", "loser" }))); + // s.add(outer.toKBSyntaxString(null,null)); + SparqlQueryDescriptionConvertVisitor testVisitor = new SparqlQueryDescriptionConvertVisitor(); + testVisitor.setSubclassMap(testMap); + String q = testVisitor.getSparqlQuery(outer.toKBSyntaxString()); + System.out.println(q); + if (true) { + System.exit(0); + } + // <http://nlp2rdf.org/ontology/sentencefinalpunctuation_tag> + String query = ""; + SparqlQueryDescriptionConvertVisitor visit = new SparqlQueryDescriptionConvertVisitor(); + visit.setLabels(false); + visit.setDistinct(false); + // visit.setClassToSubclassesVirtuoso(subclassMap); + + for (String kbsyntax : s) { + 
query = visit.getSparqlQuery(kbsyntax); + result.put(kbsyntax, query); + } + System.out.println("************************"); + for (String string : result.keySet()) { + System.out.println("KBSyntayString: " + string); + System.out.println("Query:\n" + result.get(string)); + System.out.println("************************"); + } + System.out.println("Finished"); + } catch (ParseException e) { + e.printStackTrace(); + } + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .Negation) + */ + public void visit(Negation description) { + logger.trace("Negation"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .ObjectAllRestriction) + */ + public void visit(ObjectAllRestriction description) { + logger.trace("ObjectAllRestriction"); + query+="\n ?"+stack.peek()+ " <"+description.getRole()+"> ?object"+currentObject; + customFilter += "\n FILTERS NOT EXISTS { ?object a ?object" + currentObject++ + + " FILTER (?object" + currentObject + " != <" + + description.getChild(0) + "> ) }"; + currentObject++; + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .ObjectSomeRestriction) + */ + public void visit(ObjectSomeRestriction description) { + logger.trace("ObjectSomeRestriction"); + String option = ""; + if (transitiveProperties != null + && transitiveProperties.contains(description.getRole().toString())) { + option = " OPTION (TRANSITIVE , t_in(?" + stack.peek() + "), t_out(?object" + + currentObject + "), T_MIN(0), T_MAX(6), T_DIRECTION 1 , T_NO_CYCLES) "; + } + + if (description.getChild(0) instanceof Thing) { + // I removed a point here at the end + query += "\n?" + stack.peek() + " <" + description.getRole() + "> [] " + option + " "; + } else { + query += "\n?" + stack.peek() + " <" + description.getRole() + "> ?object" + + currentObject + option + " . "; + stack.push("object" + currentObject); + currentObject++; + description.getChild(0).accept(this); + stack.pop(); + } + + logger.trace(description.getRole().toString()); + logger.trace(description.getChild(0).toString()); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .Nothing) + */ + public void visit(Nothing description) { + logger.trace("Nothing"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .Thing) + */ + public void visit(Thing description) { + logger.trace("Thing"); + + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .Intersection) + */ + public void visit(Intersection description) { + if (description.getChild(0) instanceof Thing) { + logger.trace("Intersection with TOP"); + description.getChild(1).accept(this); + } else if (description.getChild(1) instanceof Thing) { + logger.trace("Intersection with TOP"); + description.getChild(0).accept(this); + } else { + logger.trace("Intersection"); + description.getChild(0).accept(this); + query += ". 
"; + description.getChild(1).accept(this); + } + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .Union) + */ + public void visit(Union description) { + + if (description.getChild(0) instanceof Thing) { + logger.trace("Union with TOP"); + description.getChild(1).accept(this); + } else if (description.getChild(1) instanceof Thing) { + logger.trace("Union with TOP"); + description.getChild(0).accept(this); + } else { + logger.trace("Union"); + query += "{"; + description.getChild(0).accept(this); + query += "} UNION {"; + description.getChild(1).accept(this); + query += "}"; + } + + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .ObjectMinCardinalityRestriction) + */ + public void visit(ObjectMinCardinalityRestriction description) { + logger.trace("ObjectMinCardinalityRestriction"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .ObjectExactCardinalityRestriction) + */ + public void visit(ObjectExactCardinalityRestriction description) { + logger.trace("ObjectExactCardinalityRestriction"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .ObjectMaxCardinalityRestriction) + */ + public void visit(ObjectMaxCardinalityRestriction description) { + logger.trace("ObjectMaxCardinalityRestriction"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .ObjectValueRestriction) + */ + public void visit(ObjectValueRestriction description) { + ObjectProperty op = (ObjectProperty) description.getRestrictedPropertyExpression(); + Individual ind = description.getIndividual(); + query += "\n?" + stack.peek() + " <" + op.getName() + "> <" + ind.getName() + "> "; + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .DatatypeValueRestriction) + */ + public void visit(DatatypeValueRestriction description) { + logger.trace("DatatypeValueRestriction"); + query += "\n?" + stack.peek() + " <" + description.getRestrictedPropertyExpression() + + "> \"" + description.getValue().getLiteral() + "\" "; + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .NamedClass) + */ + public void visit(NamedClass description) { + logger.trace("NamedClass"); + query += "\n?" 
+ stack.peek() + " a <" + description.getName() + "> "; + foundNamedClasses.add(description.getName()); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.algorithms + * .gp.ADC) + */ + public void visit(ADC description) { + logger.trace("ADC"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .DatatypeMinCardinalityRestriction) + */ + public void visit(DatatypeMinCardinalityRestriction description) { + logger.trace("DatatypeMinCardinalityRestriction"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .DatatypeExactCardinalityRestriction) + */ + public void visit(DatatypeExactCardinalityRestriction description) { + logger.trace("DatatypeExactCardinalityRestriction"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .DatatypeMaxCardinalityRestriction) + */ + public void visit(DatatypeMaxCardinalityRestriction description) { + logger.trace("DatatypeMaxCardinalityRestriction"); + } + + /* + * (non-Javadoc) + * + * @see + * org.dllearner.core.owl.DescriptionVisitor#visit(org.dllearner.core.owl + * .DatatypeSomeRestriction) + */ + public void visit(DatatypeSomeRestriction description) { + logger.trace("DatatypeSomeRestriction"); + } + + @Override + public void visit(ObjectOneOf description) { + logger.trace("ObjectOneOf"); + + } + } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-06-13 13:36:51
|
Revision: 3996 http://sourceforge.net/p/dl-learner/code/3996 Author: lorenz_b Date: 2013-06-13 13:36:48 +0000 (Thu, 13 Jun 2013) Log Message: ----------- Improved enrichment. Modified Paths: -------------- trunk/interfaces/pom.xml trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2013-06-13 13:34:00 UTC (rev 3995) +++ trunk/interfaces/pom.xml 2013-06-13 13:36:48 UTC (rev 3996) @@ -521,6 +521,10 @@ <!-- Exclude Project-D from Project-B --> <artifactId>slf4j-log4j12</artifactId> </exclusion> + <exclusion> + <groupId>net.sourceforge</groupId> + <artifactId>owlapi</artifactId> + </exclusion> </exclusions> </dependency> <dependency> Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-13 13:34:00 UTC (rev 3995) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-13 13:36:48 UTC (rev 3996) @@ -67,6 +67,7 @@ import org.aksw.commons.jena_owlapi.Conversion; import org.aksw.jena_sparql_api.core.QueryExecutionFactory; +import org.apache.jena.riot.Lang; import org.apache.jena.riot.checker.CheckerLiterals; import org.apache.jena.riot.system.ErrorHandlerFactory; import org.apache.log4j.ConsoleAppender; @@ -345,7 +346,7 @@ // loop over all entities and call appropriate algorithms Set<NamedClass> classes = reasoner.getTypes();//st.getAllClasses(); - filterByNamespaces(classes);//classes = Sets.newHashSet(new NamedClass("http://dbpedia.org/ontology/AdministrativeRegion")); + filterByNamespaces(classes);//classes = Sets.newHashSet(new NamedClass("http://dbpedia.org/ontology/Arachnid")); int entities = 0; for(NamedClass nc : classes) { try { @@ -856,16 +857,14 @@ model.write(fos, "TURTLE", null); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); OWLOntology ontology = man.loadOntologyFromOntologyDocument(new ByteArrayInputStream(baos.toByteArray())); - try { - man.saveOntology(ontology, new TurtleOntologyFormat(), new FileOutputStream("error.owl")); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } catch (FileNotFoundException e) { - e.printStackTrace(); - } return ontology; } catch (OWLOntologyCreationException e) { e.printStackTrace(); + try { + model.write(new FileOutputStream("parse-error.ttl"), "TURTLE", null); + } catch (FileNotFoundException e1) { + e1.printStackTrace(); + } } return null; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
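The Enrichment change above keeps the Jena-to-OWL-API conversion (serialize the model to Turtle in memory, let the OWL API parse it) but moves the debug dump into the failure branch. Distilled into a single helper, roughly as follows; the parse-error.ttl file name is the one used in the diff, and exception handling is simplified for the sketch.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;

import com.hp.hpl.jena.rdf.model.Model;

public class JenaToOwlApi {

    public static OWLOntology asOwlOntology(Model model) throws Exception {
        // round-trip via an in-memory Turtle serialization
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        model.write(baos, "TURTLE", null);
        try {
            return OWLManager.createOWLOntologyManager()
                    .loadOntologyFromOntologyDocument(new ByteArrayInputStream(baos.toByteArray()));
        } catch (OWLOntologyCreationException e) {
            // keep the triples that broke the parser so they can be inspected later
            model.write(new FileOutputStream("parse-error.ttl"), "TURTLE", null);
            throw e;
        }
    }
}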
From: <lor...@us...> - 2013-06-13 13:34:03
|
Revision: 3995 http://sourceforge.net/p/dl-learner/code/3995 Author: lorenz_b Date: 2013-06-13 13:34:00 +0000 (Thu, 13 Jun 2013) Log Message: ----------- Cleaned up pom. Optimzed cache usage. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-06-13 10:28:10 UTC (rev 3994) +++ trunk/components-core/pom.xml 2013-06-13 13:34:00 UTC (rev 3995) @@ -102,37 +102,8 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <type>pom</type> + <version>3.4.4</version> </dependency> - <dependency> - <groupId>net.sourceforge.owlapi</groupId> - <artifactId>owlapi-dlsyntax</artifactId> - <version>3.3</version> - </dependency> - <dependency> - <groupId>net.sourceforge.owlapi</groupId> - <artifactId>owlapi-functional</artifactId> - <version>3.3</version> - </dependency> - - <dependency> - <groupId>net.sourceforge.owlapi</groupId> - <artifactId>owlapi-reasoner</artifactId> -</dependency> -<dependency> - <groupId>net.sourceforge.owlapi</groupId> - <artifactId>owlapi-debugging</artifactId> - <version>3.3</version> -</dependency> -<dependency> - <groupId>net.sourceforge.owlapi</groupId> - <artifactId>owlapi-modularity</artifactId> - <version>3.3</version> -</dependency> -<dependency> - <groupId>net.sourceforge.owlapi</groupId> - <artifactId>owlapi-util</artifactId> - </dependency> <!-- THIS IS FROM THE UNIBAS REPO --> <dependency> Modified: trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java 2013-06-13 10:28:10 UTC (rev 3994) +++ trunk/components-core/src/main/java/org/dllearner/kb/OWLAPIOntology.java 2013-06-13 13:34:00 UTC (rev 3995) @@ -21,6 +21,7 @@ import java.io.File; import java.net.URI; +import java.util.Collections; import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.OntologyFormat; @@ -39,10 +40,12 @@ private byte[] ontologyBytes; private OntologyToByteConverter converter = new SimpleOntologyToByteConverter(); + private OWLOntology ontology; - public OWLAPIOntology(OWLOntology onto) { - ontologyBytes = converter.convert(onto); + public OWLAPIOntology(OWLOntology ontology) { + this.ontology = ontology; +// ontologyBytes = converter.convert(ontology); } public static String getName() { @@ -51,7 +54,20 @@ @Override public OWLOntology createOWLOntology(OWLOntologyManager manager) { - return converter.convert(ontologyBytes, manager); + OWLOntology copy = null; + try { + IRI iri; + if(ontology.getOntologyID().isAnonymous()){ + iri = IRI.generateDocumentIRI(); + } else { + iri = ontology.getOntologyID().getOntologyIRI(); + } + copy = manager.createOntology(iri, Collections.singleton(ontology)); + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } +// return converter.convert(ontologyBytes, manager); + return copy; } @Override Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 
=================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-06-13 10:28:10 UTC (rev 3994) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-06-13 13:34:00 UTC (rev 3995) @@ -11,7 +11,6 @@ import org.aksw.jena_sparql_api.cache.extra.CacheEx; import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; import org.aksw.jena_sparql_api.core.QueryExecutionFactory; -//import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; import org.aksw.jena_sparql_api.model.QueryExecutionFactoryModel; import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated; import org.apache.log4j.Level; @@ -19,6 +18,7 @@ import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.rdf.model.Model; +//import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; //import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; public class ConciseBoundedDescriptionGeneratorImpl implements ConciseBoundedDescriptionGenerator{ @@ -27,34 +27,42 @@ private int chunkSize = 0; - private ExtractionDBCache cache; - private SparqlEndpoint endpoint; private Model baseModel; private List<String> namespaces; + private static final int MAX_RECURSION_DEPTH_DEFAULT = 1; private int maxRecursionDepth = 1; - private String cacheDir; + private QueryExecutionFactory qef; public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, ExtractionDBCache cache) { - this.endpoint = endpoint; - this.cache = cache; + this(endpoint, cache, MAX_RECURSION_DEPTH_DEFAULT); } public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, ExtractionDBCache cache, int maxRecursionDepth) { - this.endpoint = endpoint; - this.cache = cache; - this.maxRecursionDepth = maxRecursionDepth; + this(endpoint, cache.getCacheDirectory(), maxRecursionDepth); } public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, String cacheDir, int maxRecursionDepth) { - this.endpoint = endpoint; - this.cacheDir = cacheDir; this.maxRecursionDepth = maxRecursionDepth; + + qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); + if(cacheDir != null){ + try { + long timeToLive = TimeUnit.DAYS.toMillis(30); + CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDir, timeToLive, true); + CacheEx cacheFrontend = new CacheExImpl(cacheBackend); + qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + qef = new QueryExecutionFactoryPaginated(qef, 10000); } public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint, String cacheDir) { - this.endpoint = endpoint; - this.cacheDir = cacheDir; + this(endpoint, cacheDir, MAX_RECURSION_DEPTH_DEFAULT); } public ConciseBoundedDescriptionGeneratorImpl(SparqlEndpoint endpoint) { @@ -63,6 +71,8 @@ public ConciseBoundedDescriptionGeneratorImpl(Model model) { this.baseModel = model; + + qef = new QueryExecutionFactoryModel(baseModel); } public Model getConciseBoundedDescription(String resourceURI){ @@ -79,25 +89,6 @@ private Model getModelChunked(String resource, int depth){ String query = makeConstructQueryOptional(resource, chunkSize, 0, depth); - QueryExecutionFactory qef; - if(endpoint != null){ - qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); - if(cacheDir != null){ - try { - long 
timeToLive = TimeUnit.DAYS.toMillis(30); - CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDir, timeToLive, true); - CacheEx cacheFrontend = new CacheExImpl(cacheBackend); - qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); - } catch (ClassNotFoundException e) { - e.printStackTrace(); - } catch (SQLException e) { - e.printStackTrace(); - } - } - qef = new QueryExecutionFactoryPaginated(qef, 10000); - } else { - qef = new QueryExecutionFactoryModel(baseModel); - } QueryExecution qe = qef.createQueryExecution(query); Model model = qe.execConstruct(); return model; Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-13 10:28:10 UTC (rev 3994) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-13 13:34:00 UTC (rev 3995) @@ -19,7 +19,6 @@ package org.dllearner.reasoning; -import java.net.SocketTimeoutException; import java.net.URL; import java.sql.SQLException; import java.util.ArrayList; @@ -41,6 +40,7 @@ import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; import org.aksw.jena_sparql_api.core.QueryExecutionFactory; import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; +import org.aksw.jena_sparql_api.model.QueryExecutionFactoryModel; import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated; import org.dllearner.core.ComponentAnn; import org.dllearner.core.IndividualReasoner; @@ -81,9 +81,6 @@ import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; -import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; -import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; -import com.hp.hpl.jena.sparql.resultset.ResultSetMem; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.OWL2; import com.hp.hpl.jena.vocabulary.RDF; @@ -98,6 +95,7 @@ private boolean useCache = true; private ExtractionDBCache cache; + private QueryExecutionFactory qef; private SparqlEndpointKS ks; private ClassHierarchy hierarchy; @@ -128,6 +126,27 @@ classPopularityMap = new HashMap<NamedClass, Integer>(); objectPropertyPopularityMap = new HashMap<ObjectProperty, Integer>(); + + if(ks.isRemote()){ + SparqlEndpoint endpoint = ks.getEndpoint(); + qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); + if(cache != null){ + try { + long timeToLive = TimeUnit.DAYS.toMillis(30); + CacheCoreEx cacheBackend = CacheCoreH2.create(cache.getCacheDirectory(), timeToLive, true); + CacheEx cacheFrontend = new CacheExImpl(cacheBackend); + qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); + } + } + qef = new QueryExecutionFactoryPaginated(qef, 10000); + + } else { + qef = new QueryExecutionFactoryModel(((LocalModelBasedSparqlEndpointKS)ks).getModel()); + } } public SPARQLReasoner(OntModel model) { @@ -1362,58 +1381,25 @@ private ResultSet executeSelectQuery(String query){ logger.debug("Sending query \n {}", query); - ResultSet rs = null; - if(ks.isRemote()){ - SparqlEndpoint endpoint = ks.getEndpoint(); - QueryExecutionFactory qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); - if(cache != null){ - try { - long timeToLive = TimeUnit.DAYS.toMillis(30); 
- CacheCoreEx cacheBackend = CacheCoreH2.create(cache.getCacheDirectory(), timeToLive, true); - CacheEx cacheFrontend = new CacheExImpl(cacheBackend); - qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); - } catch (ClassNotFoundException e) { - e.printStackTrace(); - } catch (SQLException e) { - e.printStackTrace(); - } - } - qef = new QueryExecutionFactoryPaginated(qef, 10000); - QueryExecution qe = qef.createQueryExecution(query); - rs = qe.execSelect(); - } else { - QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - rs = qExec.execSelect(); - } + QueryExecution qe = qef.createQueryExecution(query); + ResultSet rs = qe.execSelect(); return rs; } private ResultSet executeSelectQuery(String query, long timeout){ logger.debug("Sending query \n {}", query); - ResultSet rs = null; - if(ks.isRemote()){ - SparqlEndpoint endpoint = ((SparqlEndpointKS) ks).getEndpoint(); - QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), - query); - queryExecution.setTimeout(timeout); - queryExecution.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); - queryExecution.setNamedGraphURIs(endpoint.getNamedGraphURIs()); - try { - rs = queryExecution.execSelect(); - } catch (QueryExceptionHTTP e) { - if(e.getCause() instanceof SocketTimeoutException){ - logger.warn("Got timeout"); - } else { - logger.error("Exception executing query", e); - } - rs = new ResultSetMem(); - } - } else { - QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - rs = qExec.execSelect(); - } + QueryExecution qe = qef.createQueryExecution(query); + qe.setTimeout(timeout); + ResultSet rs = qe.execSelect(); return rs; } + + private boolean executeAskQuery(String query){ + logger.debug("Sending query \n {}", query); + QueryExecution qe = qef.createQueryExecution(query); + boolean ret = qe.execAsk(); + return ret; + } /** * Returns TRUE if the class hierarchy was computed before. 
@@ -1431,27 +1417,6 @@ this.useCache = useCache; } - private boolean executeAskQuery(String query){ - boolean ret; - if(ks.isRemote()){ - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); - } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - ret = queryExecution.execAsk(); - - } else { - QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); - ret = qExec.execAsk(); - } - - return ret; - } - - public static void main(String[] args) throws Exception{ // QueryEngineHTTP e = new QueryEngineHTTP("http://bibleontology.com/sparql/index.jsp", // "SELECT DISTINCT ?type WHERE {?s a ?type) LIMIT 10"); Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java 2013-06-13 10:28:10 UTC (rev 3994) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/SimpleOntologyToByteConverter.java 2013-06-13 13:34:00 UTC (rev 3995) @@ -1,5 +1,8 @@ package org.dllearner.utilities.owl; +import org.coode.owlapi.turtle.TurtleOntologyFormat; +import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; +import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyManager; @@ -7,7 +10,9 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.FileOutputStream; import java.io.IOException; +import java.util.Set; /** * Created by IntelliJ IDEA. @@ -26,7 +31,7 @@ OWLOntologyManager manager = ontology.getOWLOntologyManager(); try { - manager.saveOntology(ontology,baos); + manager.saveOntology(ontology, baos); baos.close(); } catch (OWLOntologyStorageException e) { throw new RuntimeException(e); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
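The substance of this revision is that the SPARQL query-execution pipeline is now built once per component instead of once per query: an HTTP factory is wrapped with an optional H2-backed result cache and a pagination decorator, and the resulting QueryExecutionFactory is reused by executeSelectQuery/executeAskQuery. A minimal, self-contained sketch of that decorator chain follows; the endpoint URL, default graph and cache directory are placeholders, and the constructor variants are assumed to match the jena-sparql-api classes used in the diff above.

import java.util.Collections;
import java.util.concurrent.TimeUnit;

import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx;
import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx;
import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2;
import org.aksw.jena_sparql_api.cache.extra.CacheEx;
import org.aksw.jena_sparql_api.cache.extra.CacheExImpl;
import org.aksw.jena_sparql_api.core.QueryExecutionFactory;
import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp;
import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated;

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.ResultSet;

public class CachedQueryExecutionFactoryExample {

    public static void main(String[] args) throws Exception {
        // placeholder endpoint, default graph and cache directory
        String endpointUrl = "http://dbpedia.org/sparql";
        String cacheDir = "sparql-cache";

        // 1. plain HTTP factory for the remote endpoint
        QueryExecutionFactory qef = new QueryExecutionFactoryHttp(endpointUrl,
                Collections.singletonList("http://dbpedia.org"));

        // 2. decorate it with an H2-backed cache keeping results for 30 days
        long timeToLive = TimeUnit.DAYS.toMillis(30);
        CacheCoreEx cacheBackend = CacheCoreH2.create(cacheDir, timeToLive, true);
        CacheEx cacheFrontend = new CacheExImpl(cacheBackend);
        qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend);

        // 3. decorate it with transparent pagination (chunks of 10000 results)
        qef = new QueryExecutionFactoryPaginated(qef, 10000);

        // the factory is built once and can now serve many queries
        QueryExecution qe = qef.createQueryExecution("SELECT DISTINCT ?type WHERE {?s a ?type} LIMIT 10");
        ResultSet rs = qe.execSelect();
        while (rs.hasNext()) {
            System.out.println(rs.next());
        }
        qe.close();
    }
}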
From: <jen...@us...> - 2013-06-13 10:28:13
Revision: 3994 http://sourceforge.net/p/dl-learner/code/3994 Author: jenslehmann Date: 2013-06-13 10:28:10 +0000 (Thu, 13 Jun 2013) Log Message: ----------- configurable semantics for all quantor in fast instance checker Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-13 07:46:18 UTC (rev 3993) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-13 10:28:10 UTC (rev 3994) @@ -137,8 +137,10 @@ "to return all those which do not have an r-filler not in C. The domain semantics is to use those" + "which are in the domain of r and do not have an r-filler not in C. The forallExists semantics is to"+ "use those which have at least one r-filler and do not have an r-filler not in C.",defaultValue = "standard",propertyEditorClass = StringTrimmerEditor.class) - private String forAllSemantics; + private ForallSemantics forallSemantics = ForallSemantics.Standard; + public enum ForallSemantics { Standard, SomeOnly } + /** * Creates an instance of the fast instance checker. */ @@ -400,7 +402,11 @@ } SortedSet<Individual> roleFillers = opPos.get(op).get(individual); if (roleFillers == null) { - return true; + if(forallSemantics == ForallSemantics.Standard) { + return true; + } else { + return false; + } } for (Individual roleFiller : roleFillers) { if (!hasTypeImpl(child, roleFiller)) { @@ -1124,11 +1130,13 @@ this.defaultNegation = defaultNegation; } - public String getForAllSemantics() { - return forAllSemantics; - } + public ForallSemantics getForAllSemantics() { + return forallSemantics; + } - public void setForAllSemantics(String forAllSemantics) { - this.forAllSemantics = forAllSemantics; - } + public void setForAllSemantics(ForallSemantics forallSemantics) { + this.forallSemantics = forallSemantics; + } + + } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
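The new ForallSemantics switch only matters for individuals that have no r-filler at all: under Standard semantics a universal restriction "forall r.C" is then trivially satisfied, as in OWL, while under SomeOnly it is not. A small illustrative sketch of that decision, independent of the FastInstanceChecker internals; the class, method and example names here are made up for illustration.

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class ForallSemanticsExample {

    enum ForallSemantics { Standard, SomeOnly }

    /**
     * Decides whether an individual satisfies "forall r.C", given the set of its
     * r-fillers and the set of individuals known to be instances of C.
     */
    static boolean satisfiesForall(Set<String> roleFillers, Set<String> instancesOfC,
            ForallSemantics semantics) {
        if (roleFillers == null || roleFillers.isEmpty()) {
            // no r-filler at all: trivially true under the standard OWL reading,
            // false if at least one r-filler is required (SomeOnly)
            return semantics == ForallSemantics.Standard;
        }
        // otherwise every r-filler must be an instance of C
        for (String filler : roleFillers) {
            if (!instancesOfC.contains(filler)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        Set<String> noFillers = Collections.emptySet();
        Set<String> instancesOfC = new HashSet<String>();
        instancesOfC.add("ex:a");

        System.out.println(satisfiesForall(noFillers, instancesOfC, ForallSemantics.Standard)); // true
        System.out.println(satisfiesForall(noFillers, instancesOfC, ForallSemantics.SomeOnly)); // false
    }
}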
From: <lor...@us...> - 2013-06-13 07:46:21
Revision: 3993 http://sourceforge.net/p/dl-learner/code/3993 Author: lorenz_b Date: 2013-06-13 07:46:18 +0000 (Thu, 13 Jun 2013) Log Message: ----------- Updated SPARQL API. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/pom.xml trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/components-core/pom.xml 2013-06-13 07:46:18 UTC (rev 3993) @@ -91,28 +91,6 @@ <dependencies> <dependency> - <groupId>org.aksw.commons</groupId> - <artifactId>sparql</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - <exclusion> - <artifactId>arq</artifactId> - <groupId>com.hp.hpl.jena</groupId> - </exclusion> - <exclusion> - <artifactId>xercesImpl</artifactId> - <groupId>xerces</groupId> - </exclusion> - <exclusion> - <artifactId>any23-core</artifactId> - <groupId>org.deri.any23</groupId> - </exclusion> - </exclusions> - </dependency> - <dependency> <groupId>org.ini4j</groupId> <artifactId>ini4j</artifactId> </dependency> @@ -323,7 +301,7 @@ <dependency> <groupId>org.aksw.jena-sparql-api</groupId> <artifactId>jena-sparql-api-core</artifactId> - <version>2.10.0-3</version> + <version>2.10.0-4-SNAPSHOT</version> </dependency> </dependencies> <dependencyManagement> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/NBR.java 2013-06-13 07:46:18 UTC (rev 3993) @@ -18,7 +18,6 @@ import javax.xml.ws.http.HTTPException; -import org.aksw.commons.jena.ExtendedQueryEngineHTTP; import org.apache.log4j.Logger; import org.dllearner.algorithms.qtl.datastructures.QueryTree; import org.dllearner.algorithms.qtl.datastructures.impl.GeneralisedQueryTree; @@ -41,6 +40,7 @@ import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.RDFNode; +import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.sparql.expr.E_Equals; import com.hp.hpl.jena.sparql.expr.E_LogicalNot; import com.hp.hpl.jena.sparql.expr.ExprVar; @@ -1401,8 +1401,8 @@ private ResultSet executeSelectQuery(String query){ ResultSet rs; if(model == null){ - ExtendedQueryEngineHTTP queryExecution = new ExtendedQueryEngineHTTP(endpoint.getURL().toString(), query); - queryExecution.setTimeOut(maxExecutionTimeInSeconds * 1000); + QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpoint.getURL().toString(), query); + queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000); for (String dgu : endpoint.getDefaultGraphURIs()) { queryExecution.addDefaultGraph(dgu); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-12 09:13:43 UTC (rev 3992) +++ 
trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-13 07:46:18 UTC (rev 3993) @@ -21,8 +21,8 @@ import java.net.SocketTimeoutException; import java.net.URL; +import java.sql.SQLException; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -31,11 +31,17 @@ import java.util.SortedSet; import java.util.TreeMap; import java.util.TreeSet; +import java.util.concurrent.TimeUnit; -import org.aksw.commons.sparql.api.core.QueryExecutionFactory; -import org.aksw.commons.sparql.api.http.QueryExecutionFactoryHttp; -import org.aksw.commons.sparql.api.pagination.core.QueryExecutionFactoryPaginated; import org.aksw.commons.util.strings.StringUtils; +import org.aksw.jena_sparql_api.cache.core.QueryExecutionFactoryCacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreEx; +import org.aksw.jena_sparql_api.cache.extra.CacheCoreH2; +import org.aksw.jena_sparql_api.cache.extra.CacheEx; +import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; +import org.aksw.jena_sparql_api.core.QueryExecutionFactory; +import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; +import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated; import org.dllearner.core.ComponentAnn; import org.dllearner.core.IndividualReasoner; import org.dllearner.core.SchemaReasoner; @@ -61,7 +67,6 @@ import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; -import org.dllearner.kb.sparql.SparqlQuery; import org.dllearner.utilities.datastructures.SortedSetTuple; import org.dllearner.utilities.owl.ConceptComparator; import org.slf4j.Logger; @@ -73,7 +78,6 @@ import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; -import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; @@ -1360,22 +1364,26 @@ logger.debug("Sending query \n {}", query); ResultSet rs = null; if(ks.isRemote()){ - if(useCache && cache != null){ - rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(ks.getEndpoint(), query)); - } else { - QueryEngineHTTP queryExecution = new QueryEngineHTTP(ks.getEndpoint().getURL().toString(), query); - for (String dgu : ks.getEndpoint().getDefaultGraphURIs()) { - queryExecution.addDefaultGraph(dgu); + SparqlEndpoint endpoint = ks.getEndpoint(); + QueryExecutionFactory qef = new QueryExecutionFactoryHttp(endpoint.getURL().toString(), endpoint.getDefaultGraphURIs()); + if(cache != null){ + try { + long timeToLive = TimeUnit.DAYS.toMillis(30); + CacheCoreEx cacheBackend = CacheCoreH2.create(cache.getCacheDirectory(), timeToLive, true); + CacheEx cacheFrontend = new CacheExImpl(cacheBackend); + qef = new QueryExecutionFactoryCacheEx(qef, cacheFrontend); + } catch (ClassNotFoundException e) { + e.printStackTrace(); + } catch (SQLException e) { + e.printStackTrace(); } - for (String ngu : ks.getEndpoint().getNamedGraphURIs()) { - queryExecution.addNamedGraph(ngu); - } - rs = queryExecution.execSelect(); } + qef = new QueryExecutionFactoryPaginated(qef, 10000); + QueryExecution qe = qef.createQueryExecution(query); + rs = qe.execSelect(); } else { QueryExecution qExec = com.hp.hpl.jena.query.QueryExecutionFactory.create(query, ((LocalModelBasedSparqlEndpointKS)ks).getModel()); rs = qExec.execSelect(); - } return rs; } 
Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/pom.xml 2013-06-13 07:46:18 UTC (rev 3993) @@ -123,7 +123,7 @@ <dependency> <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-distribution</artifactId> - <version>3.4.3</version> + <version>3.4.4</version> <type>pom</type> </dependency> <dependency> @@ -133,6 +133,11 @@ </dependency> <dependency> <groupId>net.sourceforge.owlapi</groupId> + <artifactId>owlapi-debugging</artifactId> + <version>3.3</version> + </dependency> + <dependency> + <groupId>net.sourceforge.owlapi</groupId> <artifactId>owlapi-util</artifactId> <version>3.3</version> </dependency> @@ -524,6 +529,12 @@ <url>http://elk-reasoner.googlecode.com/svn/m2/releases</url> <releases/> </repository> + <repository> + <id>central</id> + <url>http://oss.sonatype.org</url> + <releases><enabled>true</enabled></releases> + <snapshots><enabled>true</enabled></snapshots> + </repository> </repositories> <distributionManagement> Modified: trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-12 09:13:43 UTC (rev 3992) +++ trunk/scripts/src/main/java/org/dllearner/scripts/pattern/OWLAxiomPatternUsageEvaluation.java 2013-06-13 07:46:18 UTC (rev 3993) @@ -225,7 +225,7 @@ Collections.shuffle(classesList, new Random(123)); classesList = classesList.subList(0, maxNrOfTestedClasses); classes = classesList; - //classes = Collections.singleton(new NamedClass("http://dbpedia.org/ontology/ChristianBishop")); + classes = Collections.singleton(new NamedClass("http://dbpedia.org/ontology/BaseballPlayer")); //get the maximum modal depth in the pattern axioms int maxModalDepth = maxModalDepth(patterns); @@ -1147,7 +1147,7 @@ //compute recall double recall = wald(subClassCnt, overlap); //if recall is too low we can skip the computation of the precision - if(recall < 0.2){ + if(recall < 0.3){ logger.warn("Recall(" + recall + ") too low. Skipping precision computation."); continue; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
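The API update replaces the aksw-commons ExtendedQueryEngineHTTP with Jena's own QueryEngineHTTP; apart from the import, the visible difference is the renamed timeout setter (setTimeOut becomes setTimeout), which still takes milliseconds. A minimal sketch of the new call pattern, using a placeholder endpoint, default graph and query:

import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP;

public class QueryEngineHttpExample {

    public static void main(String[] args) {
        // placeholder endpoint, default graph and query
        String endpointUrl = "http://dbpedia.org/sparql";
        String query = "SELECT DISTINCT ?type WHERE {?s a ?type} LIMIT 10";
        int maxExecutionTimeInSeconds = 20;

        QueryEngineHTTP queryExecution = new QueryEngineHTTP(endpointUrl, query);
        // Jena's setTimeout expects the value in milliseconds
        queryExecution.setTimeout(maxExecutionTimeInSeconds * 1000);
        // default graphs are still added one by one
        queryExecution.addDefaultGraph("http://dbpedia.org");

        ResultSet rs = queryExecution.execSelect();
        while (rs.hasNext()) {
            QuerySolution qs = rs.next();
            System.out.println(qs);
        }
        queryExecution.close();
    }
}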
From: <lor...@us...> - 2013-06-12 09:13:46
Revision: 3992 http://sourceforge.net/p/dl-learner/code/3992 Author: lorenz_b Date: 2013-06-12 09:13:43 +0000 (Wed, 12 Jun 2013) Log Message: ----------- Added Maven exec plugin. Modified Paths: -------------- trunk/interfaces/pom.xml Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2013-06-12 09:00:36 UTC (rev 3991) +++ trunk/interfaces/pom.xml 2013-06-12 09:13:43 UTC (rev 3992) @@ -447,8 +447,26 @@ </excludes> </configuration> </plugin> + <plugin> + <groupId>org.codehaus.mojo</groupId> + <artifactId>exec-maven-plugin</artifactId> + <version>1.2</version> + <executions> + <execution> + <goals> + <goal>exec</goal> + </goals> + </execution> + </executions> + <configuration> + <executable>java</executable> + <arguments> + <argument>-Xms512m</argument> + <argument>-Xmx4000m</argument> + </arguments> + </configuration> + </plugin> - </plugins> </build> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-06-12 09:00:39
Revision: 3991 http://sourceforge.net/p/dl-learner/code/3991 Author: lorenz_b Date: 2013-06-12 09:00:36 +0000 (Wed, 12 Jun 2013) Log Message: ----------- Added methods to export class hierarchy as set of axioms. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2013-06-12 08:59:59 UTC (rev 3990) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2013-06-12 09:00:36 UTC (rev 3991) @@ -28,6 +28,8 @@ import org.apache.log4j.Logger; import org.dllearner.utilities.owl.ConceptComparator; +import org.dllearner.utilities.owl.OWLAPIConverter; +import org.semanticweb.owlapi.model.OWLAxiom; /** * Represents a subsumption hierarchy (ignoring equivalent concepts). @@ -381,6 +383,31 @@ return new ClassHierarchy(subsumptionHierarchyUpNew, subsumptionHierarchyDownNew); } + public Set<Axiom> toAxioms(){ + return toAxioms(Thing.instance); + } + + public Set<OWLAxiom> toOWLAPIAxioms(){ + Set<OWLAxiom> owlAxioms = new HashSet<OWLAxiom>(); + Set<Axiom> axioms = toAxioms(); + for(Axiom axiom : axioms){ + owlAxioms.add(OWLAPIConverter.getOWLAPIAxiom(axiom)); + } + return owlAxioms; + } + + public Set<Axiom> toAxioms(Description concept){ + Set<Axiom> axioms = new HashSet<Axiom>(); + Set<Description> subConcepts = subsumptionHierarchyDown.get(concept); + if (subConcepts != null) { + for (Description sub : subConcepts){ + axioms.add(new SubClassAxiom(sub, concept)); + axioms.addAll(toAxioms(sub)); + } + } + return axioms; + } + /** * Checks whether the description is contained in the hierarchy. * @param description This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
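toAxioms() walks the hierarchy top-down from owl:Thing and emits one SubClassAxiom per subclass edge, and toOWLAPIAxioms() converts that set to OWL API objects, so a hierarchy computed against a remote endpoint can be materialised into a local ontology. A hedged usage sketch follows; it assumes a SPARQLReasoner whose class hierarchy has already been computed, and the helper method name is invented for illustration.

import java.util.Set;

import org.dllearner.core.owl.ClassHierarchy;
import org.dllearner.reasoning.SPARQLReasoner;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class ClassHierarchyExportExample {

    /**
     * Adds the subclass axioms of an already computed class hierarchy to the
     * given ontology; the reasoner setup itself is omitted here.
     */
    static void addHierarchy(SPARQLReasoner reasoner, OWLOntology ontology) {
        ClassHierarchy hierarchy = reasoner.getClassHierarchy();
        // convert the hierarchy into OWL API subclass axioms ...
        Set<OWLAxiom> axioms = hierarchy.toOWLAPIAxioms();
        // ... and add them to the (fragment) ontology
        ontology.getOWLOntologyManager().addAxioms(ontology, axioms);
    }

    public static void main(String[] args) throws OWLOntologyCreationException {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLOntology ontology = manager.createOntology();
        // addHierarchy(reasoner, ontology) would be called with a prepared SPARQLReasoner
        System.out.println("axioms in fragment: " + ontology.getAxiomCount());
    }
}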
From: <lor...@us...> - 2013-06-12 09:00:02
Revision: 3990 http://sourceforge.net/p/dl-learner/code/3990 Author: lorenz_b Date: 2013-06-12 08:59:59 +0000 (Wed, 12 Jun 2013) Log Message: ----------- Improved equivalence learning in enrichment script. Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-12 07:34:30 UTC (rev 3989) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-12 08:59:59 UTC (rev 3990) @@ -226,7 +226,7 @@ // restrict tested number of entities per type (only for testing purposes); // should be set to -1 in production mode - int maxEntitiesPerType = 5; + int maxEntitiesPerType = -1; // number of axioms which will be learned/considered (only applies to // some learners) @@ -345,7 +345,7 @@ // loop over all entities and call appropriate algorithms Set<NamedClass> classes = reasoner.getTypes();//st.getAllClasses(); - filterByNamespaces(classes);//classes = Sets.newHashSet(new NamedClass("http://dbpedia.org/ontology/GrandPrix")); + filterByNamespaces(classes);//classes = Sets.newHashSet(new NamedClass("http://dbpedia.org/ontology/AdministrativeRegion")); int entities = 0; for(NamedClass nc : classes) { try { @@ -425,7 +425,7 @@ Property predicate = st.getPredicate(); RDFNode object = st.getObject(); boolean startsWithAllowedNamespace = false; - if(predicate.equals(RDF.type)){ + if(predicate.equals(RDF.type) || predicate.equals(OWL.equivalentClass)){ if(object.isURIResource()){ for (String ns : allowedNamespaces) { if(object.asResource().getURI().startsWith(ns)){ @@ -517,11 +517,14 @@ runTime = System.currentTimeMillis() - startTime; System.out.println("done (" + model.size()+ " triples found in " + runTime + " ms)"); OWLOntology ontology = asOWLOntology(model); + if(reasoner.getClassHierarchy() != null){ + ontology.getOWLOntologyManager().addAxioms(ontology, reasoner.getClassHierarchy().toOWLAPIAxioms()); + } ksFragment = new OWLAPIOntology(ontology); // ksFragment.init(); rc = new FastInstanceChecker(ksFragment); rc.init(); - rc.setSubsumptionHierarchy(reasoner.getClassHierarchy()); +// rc.setSubsumptionHierarchy(reasoner.getClassHierarchy()); ksCached = ksFragment; rcCached = rc; // for (Individual ind : posExamples) { @@ -616,7 +619,7 @@ futures.add(threadPool.submit(new Callable<Model>() { @Override public Model call() throws Exception { - ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getEndpoint(), cache, 2); + ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getEndpoint(), "enrichment-cache", 2); return cbdGen.getConciseBoundedDescription(ind.getName()); } })); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
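The key change for equivalence learning is the setup order: the class hierarchy obtained from the SPARQL endpoint is now merged into the extracted fragment ontology as explicit subclass axioms before the closed-world reasoner is initialised, rather than being injected afterwards via setSubsumptionHierarchy. A condensed sketch of that order, with the fragment extraction itself omitted and the method name chosen only for illustration:

import org.dllearner.core.ComponentInitException;
import org.dllearner.core.KnowledgeSource;
import org.dllearner.kb.OWLAPIOntology;
import org.dllearner.reasoning.FastInstanceChecker;
import org.dllearner.reasoning.SPARQLReasoner;
import org.semanticweb.owlapi.model.OWLOntology;

public class FragmentReasonerSetup {

    /**
     * Revised setup order: merge the remotely computed class hierarchy into the
     * local fragment ontology, then build the closed-world reasoner on top of it.
     */
    static FastInstanceChecker createFragmentReasoner(OWLOntology fragment, SPARQLReasoner reasoner)
            throws ComponentInitException {
        if (reasoner.getClassHierarchy() != null) {
            fragment.getOWLOntologyManager().addAxioms(fragment,
                    reasoner.getClassHierarchy().toOWLAPIAxioms());
        }
        KnowledgeSource ksFragment = new OWLAPIOntology(fragment);
        FastInstanceChecker rc = new FastInstanceChecker(ksFragment);
        rc.init();
        return rc;
    }
}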
From: <lor...@us...> - 2013-06-12 07:34:34
Revision: 3989 http://sourceforge.net/p/dl-learner/code/3989 Author: lorenz_b Date: 2013-06-12 07:34:30 +0000 (Wed, 12 Jun 2013) Log Message: ----------- Added class for query execution. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryExecutionFactoryHttp.java Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2013-06-11 13:07:42 UTC (rev 3988) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2013-06-12 07:34:30 UTC (rev 3989) @@ -71,6 +71,8 @@ } public SortedSet<Description> getSubClasses(Description concept) { + String s= concept.toString(); + SortedSet<Description> result = subsumptionHierarchyDown.get(concept); if(result == null) { logger.error("Query for sub class of " + concept + " in subsumption hierarchy, but the class is not contained in the (downward) hierarchy, e.g. because the class does not exist or is ignored. Returning empty result instead."); Added: trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryExecutionFactoryHttp.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryExecutionFactoryHttp.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/QueryExecutionFactoryHttp.java 2013-06-12 07:34:30 UTC (rev 3989) @@ -0,0 +1,60 @@ +package org.dllearner.kb.sparql; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import org.aksw.jena_sparql_api.core.QueryExecutionFactoryBackString; + +import com.google.common.base.Joiner; +import com.hp.hpl.jena.query.QueryExecution; + +/** + * @author Claus Stadler + * <p/> + * Date: 7/23/11 + * Time: 9:47 PM + */ +public class QueryExecutionFactoryHttp + extends QueryExecutionFactoryBackString +{ + private String service; + + private List<String> defaultGraphs = new ArrayList<String>(); + + public QueryExecutionFactoryHttp(String service) { + this(service, Collections.<String>emptySet()); + } + + public QueryExecutionFactoryHttp(String service, String defaultGraphName) { + this(service, Collections.singleton(defaultGraphName)); + } + + public QueryExecutionFactoryHttp(String service, Collection<String> defaultGraphs) { + this.service = service; + this.defaultGraphs = new ArrayList<String>(defaultGraphs); + Collections.sort(this.defaultGraphs); + } + + @Override + public String getId() { + return service; + } + + @Override + public String getState() { + return Joiner.on("|").join(defaultGraphs); + } + + @Override + public QueryExecution createQueryExecution(String queryString) { + QueryEngineHTTP result = new QueryEngineHTTP(service, queryString); + result.setDefaultGraphURIs(defaultGraphs); + + //QueryExecution result = QueryExecutionWrapper.wrap(engine); + + return result; + } +} + Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-11 13:07:42 UTC (rev 3988) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2013-06-12 07:34:30 UTC (rev 3989) @@ -310,6 +310,9 @@ // System.out.println("FIC: " + description + " " + individual); if (description instanceof NamedClass) { + if(((NamedClass) description).getURI().equals(Thing.instance.getURI())){ + return true; + } else if(!atomicConcepts.contains(description)) { throw new ReasoningMethodUnsupportedException("Class " + description + " is not contained in knowledge base."); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-11 13:07:42 UTC (rev 3988) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-12 07:34:30 UTC (rev 3989) @@ -104,6 +104,8 @@ private Map<DatatypeProperty, Integer> dataPropertyPopularityMap; private boolean prepared = false; + + private ConceptComparator conceptComparator = new ConceptComparator(); public SPARQLReasoner(SparqlEndpointKS ks) { @@ -602,7 +604,6 @@ if(limit != 0) { query += " LIMIT " + limit; } - System.out.println("query: "+query); ResultSet rs = executeSelectQuery(query); QuerySolution qs; while(rs.hasNext()){ @@ -1149,16 +1150,22 @@ if(description instanceof Nothing){ description = new NamedClass("http://www.w3.org/2002/07/owl#Nothing"); } - SortedSet<Description> superClasses = new TreeSet<Description>(); + SortedSet<Description> superClasses = new TreeSet<Description>(conceptComparator); String query = String.format("SELECT ?sup {<%s> <%s> ?sup. FILTER(isIRI(?sup))}", ((NamedClass)description).getURI().toString(), RDFS.subClassOf.getURI() ); ResultSet rs = executeSelectQuery(query); QuerySolution qs; + String uri = null; while(rs.hasNext()){ qs = rs.next(); - superClasses.add(new NamedClass(qs.getResource("sup").getURI())); + uri = qs.getResource("sup").getURI(); + if(uri.equals(Thing.instance.getURI().toString())){ + superClasses.add(Thing.instance); + } else { + superClasses.add(new NamedClass(uri)); + } } superClasses.remove(description); return superClasses; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
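The newly added org.dllearner.kb.sparql.QueryExecutionFactoryHttp wraps Jena's QueryEngineHTTP behind the jena-sparql-api QueryExecutionFactory interface, so callers only deal with createQueryExecution(...). A short usage sketch follows; the endpoint URL, default graph and query are placeholders and not part of the commit.

import org.dllearner.kb.sparql.QueryExecutionFactoryHttp;

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.ResultSet;

public class HttpQueryFactoryExample {

    public static void main(String[] args) {
        // placeholder endpoint and default graph
        QueryExecutionFactoryHttp factory = new QueryExecutionFactoryHttp(
                "http://dbpedia.org/sparql", "http://dbpedia.org");

        QueryExecution qe = factory.createQueryExecution(
                "SELECT DISTINCT ?type WHERE {?s a ?type} LIMIT 10");
        ResultSet rs = qe.execSelect();
        while (rs.hasNext()) {
            System.out.println(rs.next());
        }
        qe.close();
    }
}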
From: <lor...@us...> - 2013-06-11 13:07:45
Revision: 3988 http://sourceforge.net/p/dl-learner/code/3988 Author: lorenz_b Date: 2013-06-11 13:07:42 +0000 (Tue, 11 Jun 2013) Log Message: ----------- Added QueryExecutionFactory using TURTLE for CONSTRUCT queries. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLEntityTypeAdder.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-06-11 13:06:13 UTC (rev 3987) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2013-06-11 13:07:42 UTC (rev 3988) @@ -11,7 +11,7 @@ import org.aksw.jena_sparql_api.cache.extra.CacheEx; import org.aksw.jena_sparql_api.cache.extra.CacheExImpl; import org.aksw.jena_sparql_api.core.QueryExecutionFactory; -import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; +//import org.aksw.jena_sparql_api.http.QueryExecutionFactoryHttp; import org.aksw.jena_sparql_api.model.QueryExecutionFactoryModel; import org.aksw.jena_sparql_api.pagination.core.QueryExecutionFactoryPaginated; import org.apache.log4j.Level; Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-11 13:06:13 UTC (rev 3987) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/SPARQLReasoner.java 2013-06-11 13:07:42 UTC (rev 3988) @@ -492,7 +492,7 @@ * @return */ public Set<NamedClass> getSiblingClasses(NamedClass cls) { - Set<NamedClass> siblings = new HashSet<NamedClass>(); + Set<NamedClass> siblings = new TreeSet<NamedClass>(); String query = "SELECT ?sub WHERE { <" + cls.getName() + "> <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?super ."; query += "?sub <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?super ."; query += "FILTER( !SAMETERM(?sub, <" + cls.getName() + ">)) . 
}";System.out.println(query); Modified: trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java 2013-06-11 13:06:13 UTC (rev 3987) +++ trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java 2013-06-11 13:07:42 UTC (rev 3988) @@ -90,27 +90,25 @@ } return ret; } - + /** * shrinks a set to the limit takes the first elements up to limit * * @param set * @param limit */ - public static SortedSet<String> stableShrink(SortedSet<String> set, + public static <T> SortedSet<T> stableShrink(SortedSet<T> set, int limit) { if (set.size() <= limit) { return set; } - SortedSet<String> ret = new TreeSet<String>(); + SortedSet<T> ret = new TreeSet<T>(); - for (String oneInd : set) { + for (T oneInd : set) { ret.add(oneInd); if (ret.size() >= limit) break; - } - return ret; } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java 2013-06-11 13:06:13 UTC (rev 3987) +++ trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java 2013-06-11 13:07:42 UTC (rev 3988) @@ -196,7 +196,7 @@ siblingNegativeExamples.addAll(sr.getIndividualsExcluding(siblingClass, nc, limit)); } } - siblingNegativeExamples = SetManipulation.fuzzyShrink(siblingNegativeExamples, strategyLimit); + siblingNegativeExamples = SetManipulation.stableShrink(siblingNegativeExamples, strategyLimit); negativeExamples.addAll(siblingNegativeExamples); } else if(strategy == SUPERCLASS){//get super class based examples SortedSet<Individual> superClassNegativeExamples = new TreeSet<Individual>(); @@ -214,7 +214,7 @@ superClassNegativeExamples.addAll(sr.getIndividualsExcluding(superClass, nc, limit)); } } - superClassNegativeExamples = SetManipulation.fuzzyShrink(superClassNegativeExamples, strategyLimit); + superClassNegativeExamples = SetManipulation.stableShrink(superClassNegativeExamples, strategyLimit); negativeExamples.addAll(superClassNegativeExamples); } else if(strategy == RANDOM){//get some random examples Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLEntityTypeAdder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLEntityTypeAdder.java 2013-06-11 13:06:13 UTC (rev 3987) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLEntityTypeAdder.java 2013-06-11 13:07:42 UTC (rev 3988) @@ -38,6 +38,16 @@ } } iterator.close(); + for (Property property : dataPropertyPredicates) { + if(!objectPropertyPredicates.contains(property)){ + model.add(property, RDF.type, OWL.DatatypeProperty); + } + } + for (Property property : objectPropertyPredicates) { + if(!dataPropertyPredicates.contains(property)){ + model.add(property, RDF.type, OWL.ObjectProperty); + } + } } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-06-11 13:06:16
Revision: 3987 http://sourceforge.net/p/dl-learner/code/3987 Author: lorenz_b Date: 2013-06-11 13:06:13 +0000 (Tue, 11 Jun 2013) Log Message: ----------- Improved enrichment script. Modified Paths: -------------- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-10 10:07:42 UTC (rev 3986) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2013-06-11 13:06:13 UTC (rev 3987) @@ -52,6 +52,11 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import java.util.Set; import java.util.SortedSet; @@ -61,6 +66,7 @@ import joptsimple.OptionSpec; import org.aksw.commons.jena_owlapi.Conversion; +import org.aksw.jena_sparql_api.core.QueryExecutionFactory; import org.apache.jena.riot.checker.CheckerLiterals; import org.apache.jena.riot.system.ErrorHandlerFactory; import org.apache.log4j.ConsoleAppender; @@ -123,10 +129,10 @@ import org.dllearner.learningproblems.Heuristics.HeuristicType; import org.dllearner.reasoning.FastInstanceChecker; import org.dllearner.reasoning.SPARQLReasoner; +import org.dllearner.refinementoperators.RhoDRDown; import org.dllearner.utilities.EnrichmentVocabulary; import org.dllearner.utilities.Helper; import org.dllearner.utilities.PrefixCCMap; -import org.dllearner.utilities.datastructures.SetManipulation; import org.dllearner.utilities.datastructures.SortedSetTuple; import org.dllearner.utilities.examples.AutomaticNegativeExampleFinderSPARQL2; import org.dllearner.utilities.owl.OWLAPIAxiomConvertVisitor; @@ -151,13 +157,17 @@ import com.clarkparsia.owlapiv3.XSD; import com.google.common.collect.Sets; +import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.RDFNode; import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Statement; +import com.hp.hpl.jena.rdf.model.StmtIterator; import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; import com.hp.hpl.jena.vocabulary.OWL; import com.hp.hpl.jena.vocabulary.RDF; @@ -225,6 +235,8 @@ private int chunksize = 1000; private boolean omitExistingAxioms; private List<String> allowedNamespaces = new ArrayList<String>(); + private int maxNrOfPositiveExamples = 20; + private int maxNrOfNegativeExamples = 20; private boolean useInference; private SPARQLReasoner reasoner; @@ -333,7 +345,7 @@ // loop over all entities and call appropriate algorithms Set<NamedClass> classes = reasoner.getTypes();//st.getAllClasses(); - filterByNamespaces(classes); + filterByNamespaces(classes);//classes = Sets.newHashSet(new NamedClass("http://dbpedia.org/ontology/GrandPrix")); int entities = 0; for(NamedClass nc : classes) { try { @@ -406,6 +418,39 @@ } } + private void filterByNamespaces(Model model){ + if(allowedNamespaces != null && !allowedNamespaces.isEmpty()){ + for (StmtIterator iterator = model.listStatements(); iterator.hasNext();) { + Statement st = iterator.next(); + 
Property predicate = st.getPredicate(); + RDFNode object = st.getObject(); + boolean startsWithAllowedNamespace = false; + if(predicate.equals(RDF.type)){ + if(object.isURIResource()){ + for (String ns : allowedNamespaces) { + if(object.asResource().getURI().startsWith(ns)){ + startsWithAllowedNamespace = true; + break; + } + } + } else { + startsWithAllowedNamespace = true; + } + } else { + for (String ns : allowedNamespaces) { + if(predicate.getURI().startsWith(ns)){ + startsWithAllowedNamespace = true; + break; + } + } + } + if(!startsWithAllowedNamespace){ + iterator.remove(); + } + } + } + } + @SuppressWarnings("unchecked") private void runClassLearningAlgorithms(SparqlEndpointKS ks, NamedClass nc) throws ComponentInitException { System.out.println("Running algorithms for class " + nc); @@ -435,12 +480,10 @@ } private List<EvaluatedAxiom> applyCELOE(SparqlEndpointKS ks, NamedClass nc, boolean equivalence, boolean reuseKnowledgeSource) throws ComponentInitException { - // get instances of class as positive examples - SPARQLReasoner sr = new SPARQLReasoner(ks); System.out.print("finding positives ... "); long startTime = System.currentTimeMillis(); - SortedSet<Individual> posExamples = sr.getIndividuals(nc, 20); + SortedSet<Individual> posExamples = reasoner.getIndividuals(nc, maxNrOfPositiveExamples); long runTime = System.currentTimeMillis() - startTime; if(posExamples.isEmpty()){ System.out.println("Skipping CELOE because class " + nc.toString() + " is empty."); @@ -452,13 +495,11 @@ // use own implementation of negative example finder System.out.print("finding negatives ... "); startTime = System.currentTimeMillis(); - AutomaticNegativeExampleFinderSPARQL2 finder = new AutomaticNegativeExampleFinderSPARQL2(ks.getEndpoint()); - SortedSet<String> negExStr = finder.getNegativeExamples(nc.getName(), posExStr); - negExStr = SetManipulation.stableShrink(negExStr, 20); - SortedSet<Individual> negExamples = Helper.getIndividualSet(negExStr); + AutomaticNegativeExampleFinderSPARQL2 finder = new AutomaticNegativeExampleFinderSPARQL2(reasoner, "http://dbpedia.org/ontology"); + SortedSet<Individual> negExamples = finder.getNegativeExamples(nc, posExamples, maxNrOfNegativeExamples); SortedSetTuple<Individual> examples = new SortedSetTuple<Individual>(posExamples, negExamples); runTime = System.currentTimeMillis() - startTime; - System.out.println("done (" + negExStr.size()+ " examples found in " + runTime + " ms)"); + System.out.println("done (" + negExamples.size()+ " examples found in " + runTime + " ms)"); AbstractReasonerComponent rc; KnowledgeSource ksFragment; @@ -468,14 +509,11 @@ } else { System.out.print("extracting fragment ... 
");//com.hp.hpl.jena.shared.impl.JenaParameters.enableEagerLiteralValidation = true; startTime = System.currentTimeMillis(); - ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getEndpoint(), cache, 2); - Model model = ModelFactory.createDefaultModel(); - for(Individual example : Sets.union(posExamples, negExamples)){ - Model cbd = cbdGen.getConciseBoundedDescription(example.getName()); - model.add(cbd); - } + Model model = getFragmentMultithreaded(ks, Sets.union(posExamples, negExamples)); filter(model); + filterByNamespaces(model); OWLEntityTypeAdder.addEntityTypes(model); + runTime = System.currentTimeMillis() - startTime; System.out.println("done (" + model.size()+ " triples found in " + runTime + " ms)"); OWLOntology ontology = asOWLOntology(model); @@ -483,8 +521,12 @@ // ksFragment.init(); rc = new FastInstanceChecker(ksFragment); rc.init(); + rc.setSubsumptionHierarchy(reasoner.getClassHierarchy()); ksCached = ksFragment; rcCached = rc; +// for (Individual ind : posExamples) { +// System.out.println(ResultSetFormatter.asText(com.hp.hpl.jena.query.QueryExecutionFactory.create("SELECT * WHERE {<" + ind.getName() + "> ?p ?o. OPTIONAL{?o a ?o_type}}",model).execSelect())); +// } } /*//old way to get SPARQL fragment @@ -516,7 +558,7 @@ ksCached = ks2; rcCached = rc; }*/ - + ClassLearningProblem lp = new ClassLearningProblem(rc); lp.setClassToDescribe(nc); lp.setEquivalence(equivalence); @@ -528,7 +570,9 @@ CELOE la = new CELOE(lp, rc); la.setMaxExecutionTimeInSeconds(10); la.setNoisePercentage(25); + la.setMaxNrOfResults(100); la.init(); + ((RhoDRDown)la.getOperator()).setUseNegation(false); startTime = System.currentTimeMillis(); System.out.print("running CELOE (for " + (equivalence ? "equivalent classes" : "sub classes") + ") ... "); la.start(); @@ -554,6 +598,42 @@ return learnedAxioms; } + private Model getFragment(SparqlEndpointKS ks, Set<Individual> individuals){ + ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getEndpoint(), cache, 2); + Model model = ModelFactory.createDefaultModel(); + for(Individual ind : individuals){ + Model cbd = cbdGen.getConciseBoundedDescription(ind.getName()); + model.add(cbd); + } + return model; + } + + private Model getFragmentMultithreaded(final SparqlEndpointKS ks, Set<Individual> individuals){ + Model model = ModelFactory.createDefaultModel(); + ExecutorService threadPool = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()); + List<Future<Model>> futures = new ArrayList<Future<Model>>(); + for (final Individual ind : individuals) { + futures.add(threadPool.submit(new Callable<Model>() { + @Override + public Model call() throws Exception { + ConciseBoundedDescriptionGenerator cbdGen = new ConciseBoundedDescriptionGeneratorImpl(ks.getEndpoint(), cache, 2); + return cbdGen.getConciseBoundedDescription(ind.getName()); + } + })); + } + for (Future<Model> future : futures) { + try { + model.add(future.get()); + } catch (InterruptedException e) { + e.printStackTrace(); + } catch (ExecutionException e) { + e.printStackTrace(); + } + } + threadPool.shutdown(); + return model; + } + private List<EvaluatedAxiom> applyLearningAlgorithm(Class<? extends AxiomLearningAlgorithm> algorithmClass, SparqlEndpointKS ks, Entity entity) throws ComponentInitException { AxiomLearningAlgorithm learner = null; try { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2013-06-10 10:07:45
Revision: 3986 http://sourceforge.net/p/dl-learner/code/3986 Author: lorenz_b Date: 2013-06-10 10:07:42 +0000 (Mon, 10 Jun 2013) Log Message: ----------- Added class for mapping from URI to variable. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAxiomToSPARQLConverter.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/VariablesMapping.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java 2013-06-06 13:32:30 UTC (rev 3985) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/qtl/operations/lgg/NoiseSensitiveLGG.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -9,6 +9,7 @@ import java.util.TreeSet; import org.dllearner.algorithms.qtl.datastructures.QueryTree; +import org.dllearner.learningproblems.Heuristics; import com.jamonapi.Monitor; import com.jamonapi.MonitorFactory; @@ -48,7 +49,7 @@ } } //compute score - double score = (trees.size() - uncoveredExamples.size()) / (double)trees.size(); + double score = Heuristics.getConfidenceInterval95WaldAverage(trees.size(), trees.size() - uncoveredExamples.size()); //add to todo list, if not already contained in todo list or solution list EvaluatedQueryTree<N> solution = new EvaluatedQueryTree<N>(lgg, uncoveredExamples, score); todo(solution); Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java 2013-06-06 13:32:30 UTC (rev 3985) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -1158,6 +1158,10 @@ } } + + public void setSubsumptionHierarchy(ClassHierarchy subsumptionHierarchy) { + this.subsumptionHierarchy = subsumptionHierarchy; + } public List<ObjectProperty> getAtomicRolesList() { if (atomicRolesList == null) Modified: trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2013-06-06 13:32:30 UTC (rev 3985) +++ trunk/components-core/src/main/java/org/dllearner/core/owl/ClassHierarchy.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -346,7 +346,9 @@ // case 2: it is not allowed, so we try its super classes } else { Set<Description> tmp = subsumptionHierarchyUp.get(d); - superClasses.addAll(tmp); + if(tmp != null){ + superClasses.addAll(tmp); + } } } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java 2013-06-06 13:32:30 UTC (rev 3985) +++ trunk/components-core/src/main/java/org/dllearner/utilities/datastructures/SetManipulation.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -38,11 +38,11 @@ * @param set * @param limit */ - public static SortedSet<String> fuzzyShrink(SortedSet<String> set, int limit) { + public static <T> SortedSet<T> fuzzyShrink(SortedSet<T> set, int limit) { if (set.size() <= limit) { return set; } - SortedSet<String> ret = new TreeSet<String>(); + SortedSet<T> ret = new TreeSet<T>(); Random r = new Random(); double treshold = ((double) limit) / set.size(); // System.out.println("treshold"+howmany); @@ -50,7 +50,7 @@ // System.out.println("treshold"+treshold); while (ret.size() < limit) { - for (String oneInd : set) { + for (T oneInd : set) { if (r.nextDouble() < treshold) { ret.add(oneInd); if (ret.size() >= limit) Modified: trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java 2013-06-06 13:32:30 UTC (rev 3985) +++ trunk/components-core/src/main/java/org/dllearner/utilities/examples/AutomaticNegativeExampleFinderSPARQL2.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -36,16 +36,21 @@ import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.Thing; import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.reasoning.SPARQLReasoner; import org.dllearner.utilities.datastructures.Datastructures; +import org.dllearner.utilities.datastructures.SetManipulation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import com.google.common.base.Predicate; import com.google.common.collect.HashMultiset; import com.google.common.collect.Multiset; import com.google.common.collect.Multisets; +import com.google.common.collect.Sets; /** * * Utility class for automatically retrieving negative examples from a @@ -57,6 +62,8 @@ */ public class AutomaticNegativeExampleFinderSPARQL2 { + private static final Logger logger = LoggerFactory.getLogger(AutomaticNegativeExampleFinderSPARQL2.class.getSimpleName()); + public enum Strategy{ SUPERCLASS, SIBLING, RANDOM; } @@ -113,6 +120,27 @@ return negEx; } + public SortedSet<Individual> getNegativeExamples(NamedClass classToDescribe, Set<Individual> positiveExamples, int limit) { + return getNegativeExamples(classToDescribe, positiveExamples, Arrays.asList(SUPERCLASS, SIBLING, RANDOM), limit); + } + + public SortedSet<Individual> getNegativeExamples(NamedClass classToDescribe, Set<Individual> positiveExamples, Collection<Strategy> strategies, int limit) { + Map<Strategy, Double> strategiesWithWeight = new HashMap<Strategy, Double>(); + double weight = 1d/strategies.size(); + for (Strategy strategy : strategies) { + strategiesWithWeight.put(strategy, weight); + } + return getNegativeExamples(classToDescribe, positiveExamples, strategiesWithWeight, limit); + } + + public SortedSet<Individual> getNegativeExamples(NamedClass classToDescribe, Set<Individual> positiveExamples, Map<Strategy, Double> strategiesWithWeight, int maxNrOfReturnedInstances) { + //set class to describe as the type for each instance + Multiset<NamedClass> 
types = HashMultiset.create(); + types.add(classToDescribe); + + return computeNegativeExamples(types, strategiesWithWeight, maxNrOfReturnedInstances); + } + public SortedSet<Individual> getNegativeExamples(Set<Individual> positiveExamples, int limit) { return getNegativeExamples(positiveExamples, Arrays.asList(SUPERCLASS, SIBLING, RANDOM), limit); } @@ -127,8 +155,6 @@ } public SortedSet<Individual> getNegativeExamples(Set<Individual> positiveExamples, Map<Strategy, Double> strategiesWithWeight, int maxNrOfReturnedInstances) { - SortedSet<Individual> negEx = new TreeSet<Individual>(); - //get the types for each instance Multiset<NamedClass> types = HashMultiset.create(); for (Individual ex : positiveExamples) { @@ -136,56 +162,89 @@ } //remove types that do not have the given namespace - if(namespace != null){ - types = Multisets.filter(types, new Predicate<NamedClass>() { - public boolean apply(NamedClass input){ - return input.getName().startsWith(namespace); - } - }); - } + types = filterByNamespace(types); //keep the most specific types keepMostSpecificClasses(types); + return computeNegativeExamples(types, strategiesWithWeight, maxNrOfReturnedInstances); + } + + private SortedSet<Individual> computeNegativeExamples(Multiset<NamedClass> positiveExamplesTypes, Map<Strategy, Double> strategiesWithWeight, int maxNrOfReturnedInstances) { + SortedSet<Individual> negativeExamples = new TreeSet<Individual>(); for (Entry<Strategy, Double> entry : strategiesWithWeight.entrySet()) { Strategy strategy = entry.getKey(); Double weight = entry.getValue(); //the max number of instances returned by the current strategy - int limit = (int)(weight * maxNrOfReturnedInstances); + int strategyLimit = (int)(weight * maxNrOfReturnedInstances); //the highest frequency value - int maxFrequency = types.entrySet().iterator().next().getCount(); - if(strategy == SIBLING){ - System.out.println("Sibling Classes Strategy"); - for (NamedClass nc : types.elementSet()) { - int frequency = types.count(nc); + int maxFrequency = positiveExamplesTypes.entrySet().iterator().next().getCount(); + + if(strategy == SIBLING){//get sibling class based examples + SortedSet<Individual> siblingNegativeExamples = new TreeSet<Individual>(); + //for each type of the positive examples + for (NamedClass nc : positiveExamplesTypes.elementSet()) { + int frequency = positiveExamplesTypes.count(nc); //get sibling classes Set<NamedClass> siblingClasses = sr.getSiblingClasses(nc); - int nrOfSiblings = siblingClasses.size(); - int v = (int)Math.ceil(((double)frequency / types.size()) / nrOfSiblings * limit); System.out.println(nc + ": " + v); + siblingClasses = filterByNamespace(siblingClasses); + System.out.println("Sibling classes: " + siblingClasses); + + int limit = (int)Math.ceil(((double)frequency / positiveExamplesTypes.size()) / siblingClasses.size() * strategyLimit); + //get instances for each sibling class for (NamedClass siblingClass : siblingClasses) { - negEx.addAll(sr.getIndividualsExcluding(siblingClass, nc, v)); + siblingNegativeExamples.addAll(sr.getIndividualsExcluding(siblingClass, nc, limit)); } + } + siblingNegativeExamples = SetManipulation.fuzzyShrink(siblingNegativeExamples, strategyLimit); + negativeExamples.addAll(siblingNegativeExamples); + } else if(strategy == SUPERCLASS){//get super class based examples + SortedSet<Individual> superClassNegativeExamples = new TreeSet<Individual>(); + //for each type of the positive examples + for (NamedClass nc : positiveExamplesTypes.elementSet()) { + int frequency = 
positiveExamplesTypes.count(nc); + //get super classes + Set<Description> superClasses = sr.getSuperClasses(nc); + superClasses.remove(new NamedClass(Thing.instance.getURI())); + superClasses = filterByNamespace(superClasses); - } - } else if(strategy == SUPERCLASS){ - System.out.println("Super Classes Strategy"); - for (NamedClass nc : types.elementSet()) { - int frequency = types.count(nc); - //get sibling classes - Set<Description> superClasses = sr.getSuperClasses(nc);System.out.println(superClasses); - int nrOfSuperClasses = superClasses.size(); - int v = (int)Math.ceil(((double)frequency / types.size()) / nrOfSuperClasses * limit); System.out.println(nc + ": " + v); + int limit = (int)Math.ceil(((double)frequency / positiveExamplesTypes.size()) / superClasses.size() * strategyLimit); + //get instances for each super class for (Description superClass : superClasses) { - negEx.addAll(sr.getIndividualsExcluding(superClass, nc, v)); + superClassNegativeExamples.addAll(sr.getIndividualsExcluding(superClass, nc, limit)); } } - } else if(strategy == RANDOM){ + superClassNegativeExamples = SetManipulation.fuzzyShrink(superClassNegativeExamples, strategyLimit); + negativeExamples.addAll(superClassNegativeExamples); + } else if(strategy == RANDOM){//get some random examples } } - return negEx; + return negativeExamples; } + private <T extends Description> Set<T> filterByNamespace(Set<T> classes){ + if(namespace != null){ + return Sets.filter(classes, new Predicate<T>() { + public boolean apply(T input){ + return input.toString().startsWith(namespace); + } + }); + } + return classes; + } + + private Multiset<NamedClass> filterByNamespace(Multiset<NamedClass> classes){ + if(namespace != null){ + return Multisets.filter(classes, new Predicate<NamedClass>() { + public boolean apply(NamedClass input){ + return input.getName().startsWith(namespace); + } + }); + } + return classes; + } + private void keepMostSpecificClasses(Multiset<NamedClass> classes){ HashMultiset<NamedClass> copy = HashMultiset.create(classes); final ClassHierarchy hierarchy = sr.getClassHierarchy(); Added: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAxiomToSPARQLConverter.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAxiomToSPARQLConverter.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OWLAxiomToSPARQLConverter.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -0,0 +1,324 @@ +package org.dllearner.utilities.owl; + + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.ToStringRenderer; +import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLAxiomVisitor; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; +import org.semanticweb.owlapi.model.OWLClassExpression; +import org.semanticweb.owlapi.model.OWLDataFactory; +import org.semanticweb.owlapi.model.OWLDataProperty; +import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; +import 
org.semanticweb.owlapi.model.OWLDataRange; +import org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom; +import org.semanticweb.owlapi.model.OWLDeclarationAxiom; +import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; +import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLHasKeyAxiom; +import org.semanticweb.owlapi.model.OWLIndividual; +import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; +import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.PrefixManager; +import org.semanticweb.owlapi.model.SWRLRule; +import org.semanticweb.owlapi.util.DefaultPrefixManager; + +import uk.ac.manchester.cs.owlapi.dlsyntax.DLSyntaxObjectRenderer; + +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.Syntax; + +public class OWLAxiomToSPARQLConverter implements OWLAxiomVisitor{ + + private String root = "?x"; + private String sparql; + private OWLClassExpressionToSPARQLConverter expressionConverter; + + public String convert(String rootVariable, OWLAxiom axiom){ + this.root = rootVariable; + sparql = ""; + expressionConverter = new OWLClassExpressionToSPARQLConverter(); + axiom.accept(this); + return sparql; + } + + public Query asQuery(String rootVariable, OWLAxiom axiom){ + String queryString = "SELECT DISTINCT " + rootVariable + " WHERE {"; + queryString += convert(rootVariable, axiom); + queryString += "}"; + return QueryFactory.create(queryString, Syntax.syntaxARQ); + } + + @Override + public void visit(OWLAnnotationAssertionAxiom axiom) { + } + + @Override + public void visit(OWLSubAnnotationPropertyOfAxiom axiom) { + } + + @Override + public void 
visit(OWLAnnotationPropertyDomainAxiom axiom) { + } + + @Override + public void visit(OWLAnnotationPropertyRangeAxiom axiom) { + } + + @Override + public void visit(OWLDeclarationAxiom axiom) { + } + + @Override + public void visit(OWLSubClassOfAxiom axiom) { + OWLClassExpression subClass = axiom.getSubClass(); + String subClassPattern = expressionConverter.convert(root, subClass); + sparql += subClassPattern; + + OWLClassExpression superClass = axiom.getSuperClass(); + String superClassPattern = expressionConverter.convert(root, superClass); + sparql += superClassPattern; + } + + @Override + public void visit(OWLNegativeObjectPropertyAssertionAxiom axiom) { + } + + @Override + public void visit(OWLAsymmetricObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLReflexiveObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLDisjointClassesAxiom axiom) { + } + + @Override + public void visit(OWLDataPropertyDomainAxiom axiom) { + } + + @Override + public void visit(OWLObjectPropertyDomainAxiom axiom) { + OWLSubClassOfAxiom subClassOfAxiom = axiom.asOWLSubClassOfAxiom(); + } + + @Override + public void visit(OWLEquivalentObjectPropertiesAxiom axiom) { + } + + @Override + public void visit(OWLNegativeDataPropertyAssertionAxiom axiom) { + } + + @Override + public void visit(OWLDifferentIndividualsAxiom axiom) { + } + + @Override + public void visit(OWLDisjointDataPropertiesAxiom axiom) { + } + + @Override + public void visit(OWLDisjointObjectPropertiesAxiom axiom) { + } + + @Override + public void visit(OWLObjectPropertyRangeAxiom axiom) { + } + + @Override + public void visit(OWLObjectPropertyAssertionAxiom axiom) { + } + + @Override + public void visit(OWLFunctionalObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLSubObjectPropertyOfAxiom axiom) { + } + + @Override + public void visit(OWLDisjointUnionAxiom axiom) { + } + + @Override + public void visit(OWLSymmetricObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLDataPropertyRangeAxiom axiom) { + } + + @Override + public void visit(OWLFunctionalDataPropertyAxiom axiom) { + } + + @Override + public void visit(OWLEquivalentDataPropertiesAxiom axiom) { + } + + @Override + public void visit(OWLClassAssertionAxiom axiom) { + } + + @Override + public void visit(OWLEquivalentClassesAxiom axiom) { + } + + @Override + public void visit(OWLDataPropertyAssertionAxiom axiom) { + } + + @Override + public void visit(OWLTransitiveObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLIrreflexiveObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLSubDataPropertyOfAxiom axiom) { + } + + @Override + public void visit(OWLInverseFunctionalObjectPropertyAxiom axiom) { + } + + @Override + public void visit(OWLSameIndividualAxiom axiom) { + } + + @Override + public void visit(OWLSubPropertyChainOfAxiom axiom) { + } + + @Override + public void visit(OWLInverseObjectPropertiesAxiom axiom) { + } + + @Override + public void visit(OWLHasKeyAxiom axiom) { + } + + @Override + public void visit(OWLDatatypeDefinitionAxiom axiom) { + } + + @Override + public void visit(SWRLRule rule) { + } + + public static void main(String[] args) throws Exception { + ToStringRenderer.getInstance().setRenderer(new DLSyntaxObjectRenderer()); + OWLAxiomToSPARQLConverter converter = new OWLAxiomToSPARQLConverter(); + + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLDataFactory df = man.getOWLDataFactory(); + PrefixManager pm = new 
DefaultPrefixManager("http://dbpedia.org/ontology/"); + + OWLClass clsA = df.getOWLClass("A", pm); + OWLClass clsB = df.getOWLClass("B", pm); + OWLClass clsC = df.getOWLClass("C", pm); + + OWLObjectProperty propR = df.getOWLObjectProperty("r", pm); + OWLObjectProperty propS = df.getOWLObjectProperty("s", pm); + + OWLDataProperty dpT = df.getOWLDataProperty("t", pm); + OWLDataRange booleanRange = df.getBooleanOWLDatatype(); + OWLLiteral lit = df.getOWLLiteral(1); + + OWLIndividual indA = df.getOWLNamedIndividual("a", pm); + OWLIndividual indB = df.getOWLNamedIndividual("b", pm); + + String rootVar = "?x"; + //NAMEDCLASS + OWLClassExpression subClass = clsA; + OWLClassExpression superClass = clsB; + OWLAxiom axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + String query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + //EXISTENTIAL RESTRICTION + superClass = df.getOWLObjectSomeValuesFrom(propR, clsB); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + //INTERSECTION + superClass = df.getOWLObjectIntersectionOf( + df.getOWLObjectSomeValuesFrom(propR, clsB), + clsB); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + //UNION + superClass = df.getOWLObjectUnionOf( + clsB, + clsC); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + //HAS VALUE + superClass = df.getOWLObjectHasValue(propR, indA); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + //UNIVERSAL RESTRICTION + superClass = df.getOWLObjectAllValuesFrom(propR, clsB); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + // ONE OF + superClass = df.getOWLObjectOneOf(indA, indB); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + //existential restriction with one of filler + superClass = df.getOWLObjectSomeValuesFrom(propR, df.getOWLObjectOneOf(indA, indB)); + axiom = df.getOWLSubClassOfAxiom(subClass, superClass); + query = converter.asQuery(rootVar, axiom).toString(); + System.out.println(axiom + "\n" + query); + + + + } + +} Added: trunk/components-core/src/main/java/org/dllearner/utilities/owl/VariablesMapping.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/VariablesMapping.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/VariablesMapping.java 2013-06-10 10:07:42 UTC (rev 3986) @@ -0,0 +1,38 @@ +package org.dllearner.utilities.owl; + +import java.util.HashMap; + +import org.semanticweb.owlapi.model.OWLEntity; + +public class VariablesMapping extends HashMap<OWLEntity, String>{ + + private int classCnt = 0; + private int propCnt = 0; + private int indCnt = 0; + + public String getVariable(OWLEntity entity){ + String var = get(entity); + if(var == null){ + if(entity.isOWLClass()){ + var = "?cls" + classCnt++; + } else if(entity.isOWLObjectProperty() || entity.isOWLDataProperty()){ + var = "?p" + propCnt++; + } else 
if(entity.isOWLNamedIndividual()){ + var = "?s" + indCnt++; + } + put(entity, var); + } + return var; + } + + public String newIndividualVariable(){ + return "?s" + indCnt++; + } + + public void reset(){ + clear(); + classCnt = 0; + propCnt = 0; + indCnt = 0; + } +}
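The score change in NoiseSensitiveLGG above replaces the raw coverage ratio (covered trees / all trees) with Heuristics.getConfidenceInterval95WaldAverage(trees.size(), covered), i.e. a value derived from a 95% Wald confidence interval around the coverage ratio, so that a solution backed by only a few example trees is rated more cautiously than one with the same ratio over many trees. The sketch below only illustrates the idea with the plain Wald formula clamped to [0,1]; the actual Heuristics method may use a corrected estimator, and the class name WaldScoreSketch is invented.

public class WaldScoreSketch {

    // Midpoint of the 95% Wald interval around success/total, clamped to [0,1].
    static double waldAverage(int total, int success) {
        double p = (double) success / total;                     // observed coverage
        double margin = 1.96 * Math.sqrt(p * (1 - p) / total);   // half-width of the interval
        double lower = Math.max(0, p - margin);
        double upper = Math.min(1, p + margin);
        return (lower + upper) / 2;
    }

    public static void main(String[] args) {
        System.out.println(waldAverage(100, 90)); // 0.9   - large sample, no clipping, equals the raw ratio
        System.out.println(waldAverage(10, 9));   // ~0.86 - same ratio over 10 trees, upper bound clipped at 1, score drops
    }
}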
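The reworked AutomaticNegativeExampleFinderSPARQL2 splits the requested number of negative examples twice: first across the chosen strategies by their weight (strategyLimit), then, inside the SIBLING and SUPERCLASS branches, across the positive examples' types by frequency and across the related classes, before SetManipulation.fuzzyShrink trims each strategy's union back to its budget. The following sketch traces that arithmetic with invented numbers (StrategyBudgetSketch, the dbo:Band type and all counts are hypothetical):

public class StrategyBudgetSketch {
    public static void main(String[] args) {
        int maxNrOfReturnedInstances = 100;
        double weight = 1d / 3;                                        // SUPERCLASS, SIBLING, RANDOM weighted equally
        int strategyLimit = (int) (weight * maxNrOfReturnedInstances); // 33 instances per strategy

        int typeFrequency = 6;   // e.g. 6 of the positive examples are typed dbo:Band
        int totalTypeCount = 10; // Multiset.size() counts occurrences over all positive examples
        int siblingClasses = 5;  // assumed number of (namespace-filtered) sibling classes of dbo:Band

        // Per-class limit as computed in the SIBLING branch.
        int perClassLimit = (int) Math.ceil(((double) typeFrequency / totalTypeCount)
                / siblingClasses * strategyLimit);                     // ceil(3.96) = 4

        System.out.println(strategyLimit + " instances for the sibling strategy, "
                + perClassLimit + " per sibling class of dbo:Band; the union is then fuzzyShrink'ed back to "
                + strategyLimit);
    }
}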
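The VariablesMapping class added in this revision keeps a stable entity-to-variable map for building SPARQL patterns: classes are assigned ?cls0, ?cls1, ..., object and data properties ?p0, ..., named individuals ?s0, ..., and newIndividualVariable() mints fresh ?s variables that are not bound to any entity. A minimal usage sketch, assuming the class above and the OWL API are on the classpath (the demo class name and the DBpedia prefix are placeholders borrowed from the converter's main method):

import org.dllearner.utilities.owl.VariablesMapping;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.PrefixManager;
import org.semanticweb.owlapi.util.DefaultPrefixManager;

public class VariablesMappingDemo {
    public static void main(String[] args) {
        OWLDataFactory df = OWLManager.createOWLOntologyManager().getOWLDataFactory();
        PrefixManager pm = new DefaultPrefixManager("http://dbpedia.org/ontology/");
        VariablesMapping mapping = new VariablesMapping();

        OWLClass clsA = df.getOWLClass("A", pm);
        OWLObjectProperty propR = df.getOWLObjectProperty("r", pm);
        OWLNamedIndividual indA = df.getOWLNamedIndividual("a", pm);

        // Each entity gets a variable once; repeated lookups return the same name.
        System.out.println(mapping.getVariable(clsA));        // ?cls0
        System.out.println(mapping.getVariable(propR));       // ?p0
        System.out.println(mapping.getVariable(indA));        // ?s0
        System.out.println(mapping.getVariable(clsA));        // ?cls0 again

        // Fresh individual variables can be minted without binding them to an entity.
        System.out.println(mapping.newIndividualVariable());  // ?s1

        // reset() clears both the map and the counters.
        mapping.reset();
        System.out.println(mapping.getVariable(clsA));        // ?cls0 again
    }
}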