From: <ji...@us...> - 2011-11-18 15:35:24
Revision: 3418
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3418&view=rev
Author: jialva
Date: 2011-11-18 15:35:17 +0000 (Fri, 18 Nov 2011)

Log Message:
-----------
Continuing to remove "positive" and "negative" example references from the DL-Learner fuzzy extension.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java
    trunk/components-core/src/main/java/org/dllearner/learningproblems/FuzzyPosNegLP.java
    trunk/test/fuzzydll/fuzzytrains.conf

Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java	2011-11-18 10:26:51 UTC (rev 3417)
+++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java	2011-11-18 15:35:17 UTC (rev 3418)
@@ -384,9 +384,11 @@
 		// changed by Josue
 		} else if (learningProblem instanceof PosNegLP) {
 			examples = Helper.union(((PosNegLP)learningProblem).getPositiveExamples(),((PosNegLP)learningProblem).getNegativeExamples());
-		} else if (learningProblem instanceof FuzzyPosNegLP) {
-			examples = Helper.union(((FuzzyPosNegLP)learningProblem).getPositiveExamples(),((FuzzyPosNegLP)learningProblem).getNegativeExamples());
-		}
+		}
+		// commented by Josue as now there's no need of + and - examples (more code need to be deleted in this sense)
+		// else if (learningProblem instanceof FuzzyPosNegLP) {
+		//examples = Helper.union(((FuzzyPosNegLP)learningProblem).getPositiveExamples(),((FuzzyPosNegLP)learningProblem).getNegativeExamples());
+		// }
 	}
 
 	@Override

Modified: trunk/components-core/src/main/java/org/dllearner/learningproblems/FuzzyPosNegLP.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/learningproblems/FuzzyPosNegLP.java	2011-11-18 10:26:51 UTC (rev 3417)
+++ trunk/components-core/src/main/java/org/dllearner/learningproblems/FuzzyPosNegLP.java	2011-11-18 15:35:17 UTC (rev 3418)
@@ -176,7 +176,8 @@
 	 */
 	@Override
 	public void init() {
-		allExamples = Helper.union(positiveExamples, negativeExamples);
+		// commented by Josue as now there's no need of + and - examples (more code need to be deleted in this sense)
+		// allExamples = Helper.union(positiveExamples, negativeExamples);
 	}
 
 	public SortedSet<Individual> getNegativeExamples() {

Modified: trunk/test/fuzzydll/fuzzytrains.conf
===================================================================
--- trunk/test/fuzzydll/fuzzytrains.conf	2011-11-18 10:26:51 UTC (rev 3417)
+++ trunk/test/fuzzydll/fuzzytrains.conf	2011-11-18 15:35:17 UTC (rev 3418)
@@ -19,7 +19,7 @@
 lp.type = "fuzzyPosNeg"
 lp.positiveExamples = { "ex:east1", "ex:east2" }
 lp.negativeExamples = { "ex:west6", "ex:west7" }
-lp.fuzzyEx = [("ex:west6",0.0),("ex:west7",0.0)]
+lp.fuzzyEx = [("ex:east1",1.0),("ex:east2",1.0),("ex:west6",0.0),("ex:west7",0.0)]
 
 // create learning algorithm to run
 alg.type = "Fuzzy CELOE"

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
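For readers unfamiliar with the fuzzy extension: the change above moves away from the two crisp example sets towards a single list of individuals annotated with membership degrees, as the updated lp.fuzzyEx line in fuzzytrains.conf shows (east trains at degree 1.0, west trains at 0.0). The following minimal Java sketch only illustrates that idea; the class and the plain map representation are hypothetical and not part of the DL-Learner API.

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustration only: a fuzzy example set maps each individual to a membership
    // degree in [0,1]; crisp positives and negatives are the special cases 1.0 and
    // 0.0, which is why a separate union of "+" and "-" examples becomes redundant.
    public class FuzzyExampleSketch {
        public static void main(String[] args) {
            Map<String, Double> fuzzyExamples = new LinkedHashMap<String, Double>();
            fuzzyExamples.put("ex:east1", 1.0); // formerly a positive example
            fuzzyExamples.put("ex:east2", 1.0);
            fuzzyExamples.put("ex:west6", 0.0); // formerly a negative example
            fuzzyExamples.put("ex:west7", 0.0);

            for (Map.Entry<String, Double> e : fuzzyExamples.entrySet()) {
                String crispView = e.getValue() >= 0.5 ? "positive" : "negative";
                System.out.println(e.getKey() + " has membership degree " + e.getValue()
                        + " (crisp view: " + crispView + ")");
            }
        }
    }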
From: <ku...@us...> - 2011-11-21 14:44:06
Revision: 3421 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3421&view=rev Author: kurzum Date: 2011-11-21 14:43:55 +0000 (Mon, 21 Nov 2011) Log Message: ----------- added sparql capabilities to owlfile Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java Added Paths: ----------- trunk/examples/sparql/new_approach.conf Modified: trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java 2011-11-21 12:46:47 UTC (rev 3420) +++ trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java 2011-11-21 14:43:55 UTC (rev 3421) @@ -25,6 +25,7 @@ import java.net.URL; import java.util.Collection; import java.util.LinkedList; +import java.util.List; import org.apache.log4j.Logger; import org.dllearner.core.AbstractKnowledgeSource; @@ -32,131 +33,144 @@ import org.dllearner.core.ComponentInitException; import org.dllearner.core.OntologyFormat; import org.dllearner.core.OntologyFormatUnsupportedException; +import org.dllearner.core.config.ListStringEditor; import org.dllearner.core.options.ConfigEntry; import org.dllearner.core.options.ConfigOption; import org.dllearner.core.options.InvalidConfigOptionValueException; import org.dllearner.core.options.URLConfigOption; import org.dllearner.core.owl.KB; import org.dllearner.reasoning.OWLAPIDIGConverter; +import org.dllearner.utilities.URLencodeUTF8; /** * @author Jens Lehmann - * + * @author Sebastian Hellmann + * <p/> + * SH: I added SPARQL capabilities. Either URL is set directly or the basedir and filename is set or the URL and the SPARQL query is set */ @ComponentAnn(name = "OWL File", shortName = "owlfile", version = 0.9) public class OWLFile extends AbstractKnowledgeSource { - private static Logger logger = Logger.getLogger(OWLFile.class); - - // TODO: turn this into a config option - private URL url; + private static Logger logger = Logger.getLogger(OWLFile.class); + + // TODO: turn this into a config option + private URL url; private String fileName; - private String baseDir; -// private URL url; -// private OWLFileConfigurator configurator ; -// @Override -// public OWLFileConfigurator getConfigurator(){ -// return configurator; -// } + private String baseDir; - public static String getName() { - return "OWL file"; - } - - public OWLFile(){ + private String sparql = null; + private List<String> defaultGraphURIs = new LinkedList<String>(); + private List<String> namedGraphURIs = new LinkedList<String>(); - } - - public OWLFile(URL url) { - this.url = url; - } + public static String getName() { + return "OWL file"; + } - public OWLFile(String filename) { - try { - url = new File(filename).toURI().toURL(); - } catch (MalformedURLException e) { - e.printStackTrace(); - } - } - - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - URLConfigOption urlOption = new URLConfigOption("url", "URL pointing to the OWL file", null, true, true); - urlOption.setRefersToFile(true); - options.add(urlOption); - return options; - } + public OWLFile() { - /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - - } + } - /* (non-Javadoc) - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() throws 
ComponentInitException { - if(url == null) { + public OWLFile(URL url) { + this.url = url; + } + + public OWLFile(String filename) { + try { + url = new File(filename).toURI().toURL(); + } catch (MalformedURLException e) { + e.printStackTrace(); + } + } + + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + URLConfigOption urlOption = new URLConfigOption("url", "URL pointing to the OWL file", null, true, true); + urlOption.setRefersToFile(true); + options.add(urlOption); + return options; + } + + /* + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + + } + + /* (non-Javadoc) + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() throws ComponentInitException { + if (sparql != null) { + StringBuilder sb = new StringBuilder(); + + //make URL + sb.append(url.toString()); + sb.append("?query=").append(URLencodeUTF8.encode(sparql)); + sb.append("&format=application%2Frdf%2Bxml"); + + for (String graph : defaultGraphURIs) { + sb.append("&default-graph-uri=").append(URLencodeUTF8.encode(graph)); + } + for (String graph : namedGraphURIs) { + sb.append("&named-graph-uri=").append(URLencodeUTF8.encode(graph)); + } + logger.info(sb.toString()); + try { - url = new URL("file://"+ baseDir + "/" + fileName); + url = new URL(sb.toString()); } catch (MalformedURLException e) { throw new RuntimeException(e); } + + } else if (url == null) { + try { + url = new URL("file://" + baseDir + "/" + fileName); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } } - - /* - try { - url = new URL(configurator.getUrl()); - } catch (MalformedURLException e) { - logger.error(e.getMessage()); - //throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(),"malformed URL " + configurator.getUrl()); - } - */ - - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#toDIG() - */ - @Override - public String toDIG(URI kbURI) { - // TODO: need some handling for cases where the URL was not set - return OWLAPIDIGConverter.getTellsString(url, OntologyFormat.RDF_XML, kbURI); - } + } - public URL getURL() { -// return configurator.getUrl(); - return url; - } - - public void setURL(URL url) { - this.url = url; -// configurator.setUrl(url); - } - /* (non-Javadoc) - * @see org.dllearner.core.KnowledgeSource#export(java.io.File, org.dllearner.core.OntologyFormat) - */ - @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { - // currently no export functions implemented, so we just throw an exception - throw new OntologyFormatUnsupportedException("export", format); - } + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#toDIG() + */ + @Override + public String toDIG(URI kbURI) { + // TODO: need some handling for cases where the URL was not set + return OWLAPIDIGConverter.getTellsString(url, OntologyFormat.RDF_XML, kbURI); + } - /* (non-Javadoc) - * @see org.dllearner.core.KnowledgeSource#toKB() - */ - @Override - public KB toKB() { - throw new Error("OWL -> KB conversion not implemented yet."); - } + public URL getURL() { + return url; + } + public void setURL(URL url) { + this.url = url; + } + + /* (non-Javadoc) + * @see org.dllearner.core.KnowledgeSource#export(java.io.File, org.dllearner.core.OntologyFormat) + */ + @Override + 
public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { + // currently no export functions implemented, so we just throw an exception + throw new OntologyFormatUnsupportedException("export", format); + } + + /* (non-Javadoc) + * @see org.dllearner.core.KnowledgeSource#toKB() + */ + @Override + public KB toKB() { + throw new Error("OWL -> KB conversion not implemented yet."); + } + public String getBaseDir() { return baseDir; } @@ -165,13 +179,13 @@ this.baseDir = baseDir; } - public URL getUrl() { - return url; - } + public URL getUrl() { + return url; + } - public void setUrl(URL url) { - this.url = url; - } + public void setUrl(URL url) { + this.url = url; + } public String getFileName() { return fileName; @@ -180,4 +194,28 @@ public void setFileName(String fileName) { this.fileName = fileName; } + + public String getSparql() { + return sparql; + } + + public void setSparql(String sparql) { + this.sparql = sparql; + } + + public List<String> getDefaultGraphURIs() { + return defaultGraphURIs; + } + + public void setDefaultGraphURIs(List<String> defaultGraphURIs) { + this.defaultGraphURIs = defaultGraphURIs; + } + + public List<String> getNamedGraphURIs() { + return namedGraphURIs; + } + + public void setNamedGraphURIs(List<String> namedGraphURIs) { + this.namedGraphURIs = namedGraphURIs; + } } Added: trunk/examples/sparql/new_approach.conf =================================================================== --- trunk/examples/sparql/new_approach.conf (rev 0) +++ trunk/examples/sparql/new_approach.conf 2011-11-21 14:43:55 UTC (rev 3421) @@ -0,0 +1,40 @@ + + +// knowledge source definition +ks1.type = "OWL File" +ks1.url = "http://dbpedia.org/sparql" +ks1.defaultGraphURIs= {"http://dbpedia.org"} +ks1.sparql = " Construct { ?s1 ?p1 ?o1. ?o1 ?p2 ?o2 . ?p1 a owl:ObjectProperty . ?p2 a owl:ObjectProperty . ?s1 a owl:Thing . ?o1 a owl:Thing . ?o2 a owl:Thing . } { ?s1 ?p1 ?o1. OPTIONAL{ ?o1 ?p2 ?o2.} Filter (! (?p1 IN (rdf:type , <http://purl.org/dc/terms/subject>, owl:sameAs, <http://dbpedia.org/ontology/wikiPageExternalLink> ) )) . Filter (! (?p2 IN (rdf:type , <http://purl.org/dc/terms/subject>, owl:sameAs, <http://dbpedia.org/ontology/wikiPageExternalLink> ) )) . Filter (!isLiteral(?o1) && !isLiteral(?o2) ) . Filter ( ?s1 IN (<http://dbpedia.org/resource/London> , <http://dbpedia.org/resource/Berlin>)) } " + + +ks2.type = "OWL File" +ks2.url = "http://dbpedia.org/sparql" +ks2.defaultGraphURIs= {"http://dbpedia.org"} +ks2.sparql = "Construct { ?s1 a ?cl1 . ?s1 a owl:Thing . ?cl1 a owl:Class . ?o2 a ?cl2. ?o2 a owl:Thing . ?cl2 a owl:Class . } { ?s1 ?p1 ?o1 . OPTIONAL { ?o1 ?p2 ?o2. } Filter (! (?p1 IN (rdf:type , <http://purl.org/dc/terms/subject> ) )) . Filter (! (?p2 IN (rdf:type , <http://purl.org/dc/terms/subject> ) )) . Filter (!isLiteral(?o1) && !isLiteral(?o2) ) . ?s1 a ?cl1 . OPTIONAL {?o2 a ?cl2. } Filter ( ?cl1 LIKE <http://dbpedia.org/ontology/%> ) . Filter ( ?cl2 LIKE <http://dbpedia.org/ontology/%> ) . } " + +ks2.type = "OWL File" +ks2.url = "http://dbpedia.org/sparql" +ks2.defaultGraphURIs= {"http://dbpedia.org"} +ks2.sparql = "Construct { ?s1 a ?cl1 . ?s1 a owl:Thing . ?cl1 a owl:Class . ?o2 a ?cl2. ?o2 a owl:Thing . ?cl2 a owl:Class . } { ?s1 ?p1 ?o1 . OPTIONAL { ?o1 ?p2 ?o2. } Filter (! (?p1 IN (rdf:type , <http://purl.org/dc/terms/subject> ) )) . Filter (! (?p2 IN (rdf:type , <http://purl.org/dc/terms/subject> ) )) . Filter (!isLiteral(?o1) && !isLiteral(?o2) ) . ?s1 a ?cl1 . OPTIONAL {?o2 a ?cl2. 
} Filter ( ?cl1 LIKE <http://dbpedia.org/ontology/%> ) . Filter ( ?cl2 LIKE <http://dbpedia.org/ontology/%> ) . } " + + +reasoner.type = "fast instance checker" +reasoner.sources = {ks1, ks2} + +lp.type = "posNegStandard" +lp.positiveExamples = {"http://dbpedia.org/resource/Berlin"} +lp.negativeExamples = {"http://dbpedia.org/resource/London"} +lp.reasoner = reasoner + +// create a refinement operator and configure it +op.type = "rho" +op.useNegation = false +op.useAllConstructor = false +op.useCardinalityRestrictions = false +op.reasoner = reasoner + +// we use the OCEL algorithm +alg.type = "ocel" +alg.reasoner = reasoner +alg.learningProblem = lp +alg.operator = op This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
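Going by the setters added above, an OWLFile knowledge source can now point at a SPARQL endpoint instead of a local file; init() then rewrites the URL into an endpoint request carrying the URL-encoded CONSTRUCT query, the RDF/XML format parameter and any default/named graph URIs. A rough usage sketch based on the setters shown in the diff (the query is a shortened placeholder, not the one from new_approach.conf, and error handling is omitted):

    import java.net.URL;
    import java.util.Collections;
    import org.dllearner.kb.OWLFile;

    public class OWLFileSparqlSketch {
        public static void main(String[] args) throws Exception {
            OWLFile ks = new OWLFile();
            ks.setUrl(new URL("http://dbpedia.org/sparql"));   // endpoint URL, not a file URL
            ks.setDefaultGraphURIs(Collections.singletonList("http://dbpedia.org"));
            ks.setSparql("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 100"); // placeholder query
            // init() builds <endpoint>?query=...&format=application%2Frdf%2Bxml&default-graph-uri=...
            ks.init();
            System.out.println(ks.getURL()); // resulting request URL, also logged by the component
        }
    }

The same wiring is what the ks1/ks2 blocks in new_approach.conf express declaratively via url, sparql and defaultGraphURIs.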
From: <lor...@us...> - 2011-11-24 12:19:11
Revision: 3436
http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3436&view=rev
Author: lorenz_b
Date: 2011-11-24 12:19:01 +0000 (Thu, 24 Nov 2011)

Log Message:
-----------
Changes to be able to load conf files under Windows.

Modified Paths:
--------------
    trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java
    trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java
    trunk/interfaces/src/main/java/org/dllearner/confparser3/ConfParserConfiguration.java

Modified: trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java	2011-11-24 12:17:25 UTC (rev 3435)
+++ trunk/components-core/src/main/java/org/dllearner/kb/KBFile.java	2011-11-24 12:19:01 UTC (rev 3436)
@@ -22,6 +22,7 @@
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.Collection;
 import java.util.LinkedList;
@@ -100,7 +101,7 @@
 			/** Leave it as is */
 			kb = KBParser.parseKBFile(getUrl());
 		} else {
-			File f = new File(baseDir, getUrl());
+			File f = new File(new URI(baseDir + File.separator + getUrl()));
 			setUrl(f.toURI().toString());
 			kb = KBParser.parseKBFile(f);
 		}
@@ -114,7 +115,9 @@
 			throw new ComponentInitException("KB file " + getUrl() + " could not be parsed correctly.", e);
 		}catch (FileNotFoundException e) {
 			throw new ComponentInitException("KB file " + getUrl() + " could not be found.", e);
-		}
+		} catch (URISyntaxException e) {
+			throw new ComponentInitException("KB file " + getUrl() + " could not be found.", e);
+		}
 	}
 
 	/*

Modified: trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java
===================================================================
--- trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java	2011-11-24 12:17:25 UTC (rev 3435)
+++ trunk/components-core/src/main/java/org/dllearner/kb/OWLFile.java	2011-11-24 12:19:01 UTC (rev 3436)
@@ -127,7 +128,8 @@
 		} else if (url == null) {
 			try {
-				url = new URL("file://" + baseDir + "/" + fileName);
+//				url = new URL("file://" + baseDir + "/" + fileName);
+				url = new URL(baseDir + "/" + fileName);
 			} catch (MalformedURLException e) {
 				throw new RuntimeException(e);
 			}

Modified: trunk/interfaces/src/main/java/org/dllearner/confparser3/ConfParserConfiguration.java
===================================================================
--- trunk/interfaces/src/main/java/org/dllearner/confparser3/ConfParserConfiguration.java	2011-11-24 12:17:25 UTC (rev 3435)
+++ trunk/interfaces/src/main/java/org/dllearner/confparser3/ConfParserConfiguration.java	2011-11-24 12:19:01 UTC (rev 3436)
@@ -29,7 +29,8 @@
 	public ConfParserConfiguration(Resource source) {
 		try {
-			baseDir = source.getFile().getAbsoluteFile().getParent();
+//			baseDir = source.getFile().getAbsoluteFile().getParent();
+			baseDir = source.getFile().getParentFile().toURI().toString();
 			parser = new ConfParser(source.getInputStream());
 			parser.Start();
 		} catch (ParseException e) {

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
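The underlying issue: concatenating "file://" with a platform-specific absolute path produces broken URLs on Windows (drive letters, backslashes). Keeping baseDir as a URI string, as the ConfParserConfiguration change above does, avoids that, because File.toURI() always yields a well-formed file URI that can simply be concatenated with a file name. A small self-contained sketch of the idea, using a hypothetical Windows path (the exact output differs per platform):

    import java.io.File;
    import java.net.URI;
    import java.net.URL;

    public class ConfBaseDirSketch {
        public static void main(String[] args) throws Exception {
            File confDir = new File("C:\\Users\\alice\\dl-learner\\examples\\family"); // hypothetical path
            // On Windows this yields something like file:/C:/Users/alice/dl-learner/examples/family,
            // independent of the platform's separator character.
            String baseDir = confDir.toURI().toString();

            // Both of these are now well-formed, matching the new OWLFile/KBFile code paths:
            URL owlUrl = new URL(baseDir + "/" + "father.owl");
            File kbFile = new File(new URI(baseDir + "/" + "father.kb"));

            System.out.println(owlUrl);
            System.out.println(kbFile);
            // By contrast, "file://" + confDir + "/" + fileName embeds backslashes and a drive
            // letter in the URL and does not resolve to the intended file on Windows.
        }
    }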
From: <jen...@us...> - 2011-11-29 18:15:25
Revision: 3449 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3449&view=rev Author: jenslehmann Date: 2011-11-29 18:15:18 +0000 (Tue, 29 Nov 2011) Log Message: ----------- - fixed bug which occurred when using OCEL with manual heuristics in conf file - improved efficiency of rho refinement operator Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/MultiHeuristic.java trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java trunk/examples/family/grandfather.conf Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-11-29 10:19:02 UTC (rev 3448) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-11-29 18:15:18 UTC (rev 3449) @@ -276,12 +276,12 @@ if(operator == null) { operator = new RhoDRDown(); ((RhoDRDown)operator).setStartClass(startClass); - ((RhoDRDown)operator).setSubHierarchy(classHierarchy); ((RhoDRDown)operator).setReasoner(reasoner); - ((RhoDRDown)operator).init(); - } else { - ((RhoDRDown)operator).setSubHierarchy(classHierarchy); } + ((RhoDRDown)operator).setSubHierarchy(classHierarchy); + ((RhoDRDown)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); + ((RhoDRDown)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); + ((RhoDRDown)operator).init(); // operator = new RhoDRDown(reasoner, classHierarchy, startClass, configurator); baseURI = reasoner.getBaseURI(); prefixes = reasoner.getPrefixes(); @@ -1002,6 +1002,14 @@ public void setUseMinimizer(boolean useMinimizer) { this.useMinimizer = useMinimizer; + } + + public OEHeuristicRuntime getHeuristic() { + return heuristic; + } + + public void setHeuristic(OEHeuristicRuntime heuristic) { + this.heuristic = heuristic; } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/MultiHeuristic.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/MultiHeuristic.java 2011-11-29 10:19:02 UTC (rev 3448) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/MultiHeuristic.java 2011-11-29 18:15:18 UTC (rev 3449) @@ -215,4 +215,20 @@ public void setExpansionPenaltyFactor(double expansionPenaltyFactor) { this.expansionPenaltyFactor = expansionPenaltyFactor; } + + public int getNrOfNegativeExamples() { + return nrOfNegativeExamples; + } + + public void setNrOfNegativeExamples(int nrOfNegativeExamples) { + this.nrOfNegativeExamples = nrOfNegativeExamples; + } + + public int getNrOfExamples() { + return nrOfExamples; + } + + public void setNrOfExamples(int nrOfExamples) { + this.nrOfExamples = nrOfExamples; + } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java 2011-11-29 10:19:02 UTC (rev 3448) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/OCEL.java 2011-11-29 18:15:18 UTC (rev 3449) @@ -104,7 +104,7 @@ private File searchTreeFile; private boolean replaceSearchTree = false; private static String 
defaultSearchTreeFile = "log/searchTree.txt"; - private String heuristicStr = "multi"; +// private String heuristicStr = "multi"; Set<NamedClass> allowedConcepts; Set<ObjectProperty> allowedRoles; Set<NamedClass> ignoredConcepts; @@ -239,84 +239,6 @@ } /* (non-Javadoc) - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings({"unchecked"}) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String name = entry.getOptionName(); - if(name.equals("writeSearchTree")) - writeSearchTree = (Boolean) entry.getValue(); - else if(name.equals("searchTreeFile")) - searchTreeFile = new File((String)entry.getValue()); - else if(name.equals("replaceSearchTree")) - replaceSearchTree = (Boolean) entry.getValue(); - else if(name.equals("heuristic")) { - String value = (String) entry.getValue(); - if(value.equals("lexicographic")) - heuristicStr = "lexicographic"; - else - heuristicStr = "flexible"; - } else if(name.equals("allowedConcepts")) { - allowedConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); - } else if(name.equals("allowedRoles")) { - allowedRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); - } else if(name.equals("ignoredConcepts")) { - ignoredConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); - } else if(name.equals("ignoredRoles")) { - ignoredRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); - } else if(name.equals("applyAllFilter")) { -// applyAllFilter = (Boolean) entry.getValue(); - } else if(name.equals("applyExistsFilter")) { -// applyExistsFilter = (Boolean) entry.getValue(); - } else if(name.equals("useTooWeakList")) { - useTooWeakList = (Boolean) entry.getValue(); - } else if(name.equals("useOverlyGeneralList")) { - useOverlyGeneralList = (Boolean) entry.getValue(); - } else if(name.equals("useShortConceptConstruction")) { - useShortConceptConstruction = (Boolean) entry.getValue(); - } else if(name.equals("improveSubsumptionHierarchy")) { - improveSubsumptionHierarchy = (Boolean) entry.getValue(); - } else if(name.equals("useAllConstructor")) { -// useAllConstructor = (Boolean) entry.getValue(); - } else if(name.equals("useExistsConstructor")) { -// useExistsConstructor = (Boolean) entry.getValue(); - } else if(name.equals("useHasValueConstructor")) { -// useHasValueConstructor = (Boolean) entry.getValue(); - } else if(name.equals("valueFrequencyThreshold")) { -// valueFrequencyThreshold = (Integer) entry.getValue(); - } else if(name.equals("useCardinalityRestrictions")) { -// useCardinalityRestrictions = (Boolean) entry.getValue(); - } else if(name.equals("useNegation")) { -// useNegation = (Boolean) entry.getValue(); - } else if(name.equals("noisePercentage")) { - noisePercentage = (Double) entry.getValue(); - } else if(name.equals("useBooleanDatatypes")) { -// useBooleanDatatypes = (Boolean) entry.getValue(); - } else if(name.equals("useDoubleDatatypes")) { -// useDoubleDatatypes = (Boolean) entry.getValue(); - } else if(name.equals("usePropernessChecks")) { - usePropernessChecks = (Boolean) entry.getValue(); - } else if(name.equals("maxPosOnlyExpansion")) { - maxPosOnlyExpansion = (Integer) entry.getValue(); - } else if(name.equals("startClass")) { - startClass = new NamedClass((String)entry.getValue()); - }else if(name.equals("maxExecutionTimeInSeconds")) { - maxExecutionTimeInSeconds = (Integer) entry.getValue(); - }else 
if(name.equals("minExecutionTimeInSeconds")) { - minExecutionTimeInSeconds = (Integer) entry.getValue(); - }else if(name.equals("guaranteeXgoodDescriptions")) { - guaranteeXgoodDescriptions = (Integer) entry.getValue(); - } else if(name.equals("maxClassDescriptionTests")) { - maxClassDescriptionTests = (Integer) entry.getValue(); - } else if(name.equals("logLevel")) { - logLevel = ((String)entry.getValue()).toUpperCase(); - } else if(name.equals("forceRefinementLengthIncrease")) { - forceRefinementLengthIncrease = (Boolean) entry.getValue(); - } - } - - /* (non-Javadoc) * @see org.dllearner.core.Component#init() */ @Override @@ -340,22 +262,47 @@ // adjust heuristic if(heuristic == null) { - if(heuristicStr == "lexicographic") - heuristic = new LexicographicHeuristic(); - else if(heuristicStr == "flexible") { - if(learningProblem instanceof PosOnlyLP) { - throw new RuntimeException("does not work with positive examples only yet"); - } - heuristic = new FlexibleHeuristic(((PosNegLP) getLearningProblem()).getNegativeExamples().size(), ((PosNegLP) getLearningProblem()).getPercentPerLengthUnit()); - } else { if(getLearningProblem() instanceof PosOnlyLP) { throw new RuntimeException("does not work with positive examples only yet"); // heuristic = new MultiHeuristic(((PosOnlyLP) getLearningProblem()).getPositiveExamples().size(),0, negativeWeight, startNodeBonus, expansionPenaltyFactor, negationPenalty); } else { heuristic = new MultiHeuristic(((PosNegLP) getLearningProblem()).getPositiveExamples().size(),((PosNegLP) getLearningProblem()).getNegativeExamples().size(), negativeWeight, startNodeBonus, expansionPenaltyFactor, negationPenalty); + } + + // OLD CODE below: in the new framework we assume that the + // heuristic is always injected as object (not as string) +// if(heuristicStr == "lexicographic") +// heuristic = new LexicographicHeuristic(); +// else if(heuristicStr == "flexible") { +// if(learningProblem instanceof PosOnlyLP) { +// throw new RuntimeException("does not work with positive examples only yet"); +// } +// heuristic = new FlexibleHeuristic(((PosNegLP) getLearningProblem()).getNegativeExamples().size(), ((PosNegLP) getLearningProblem()).getPercentPerLengthUnit()); +// } else { +// if(getLearningProblem() instanceof PosOnlyLP) { +// throw new RuntimeException("does not work with positive examples only yet"); +// // heuristic = new MultiHeuristic(((PosOnlyLP) getLearningProblem()).getPositiveExamples().size(),0, negativeWeight, startNodeBonus, expansionPenaltyFactor, negationPenalty); +// } else { +// heuristic = new MultiHeuristic(((PosNegLP) getLearningProblem()).getPositiveExamples().size(),((PosNegLP) getLearningProblem()).getNegativeExamples().size(), negativeWeight, startNodeBonus, expansionPenaltyFactor, negationPenalty); +// } +// } + } else { + // we need to set some variables to make the heuristic work + if(heuristic instanceof MultiHeuristic) { + MultiHeuristic mh = ((MultiHeuristic)heuristic); + if(mh.getNrOfNegativeExamples() == 0) { + mh.setNrOfNegativeExamples(((PosNegLP) getLearningProblem()).getNegativeExamples().size()); + } + int nrPosEx = ((PosNegLP) getLearningProblem()).getPositiveExamples().size(); + int nrNegEx = ((PosNegLP) getLearningProblem()).getNegativeExamples().size(); + if(mh.getNrOfExamples() == 0) { + mh.setNrOfExamples(nrPosEx + nrNegEx); + } + if(mh.getNrOfNegativeExamples() == 0) { + mh.setNrOfNegativeExamples(nrNegEx); + } } } - } // warn the user if he/she sets any non-standard heuristic, because it will just be ignored if(learningProblem 
instanceof PosNegLPStandard) { @@ -411,9 +358,7 @@ if(operator == null) { // we use a default operator and inject the class hierarchy for now operator = new RhoDRDown(); - ((RhoDRDown)operator).setSubHierarchy(classHierarchy); ((RhoDRDown)operator).setReasoner(reasoner); - ((RhoDRDown)operator).init(); // operator = new RhoDRDown( // reasoner, @@ -436,10 +381,11 @@ // useStringDatatypes, // instanceBasedDisjoints // ); - } else { - // we still have to inject the class hierarchy even if the operator is configured - operator.setSubHierarchy(classHierarchy); } + ((RhoDRDown)operator).setSubHierarchy(classHierarchy); + ((RhoDRDown)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); + ((RhoDRDown)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); + ((RhoDRDown)operator).init(); // create an algorithm object and pass all configuration // options to it @@ -572,14 +518,6 @@ this.replaceSearchTree = replaceSearchTree; } - public String getHeuristicStr() { - return heuristicStr; - } - - public void setHeuristicStr(String heuristic) { - this.heuristicStr = heuristic; - } - public Set<NamedClass> getAllowedConcepts() { return allowedConcepts; } Modified: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-11-29 10:19:02 UTC (rev 3448) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-11-29 18:15:18 UTC (rev 3449) @@ -45,6 +45,7 @@ import org.dllearner.core.owl.Constant; import org.dllearner.core.owl.DataRange; import org.dllearner.core.owl.DatatypeProperty; +import org.dllearner.core.owl.DatatypePropertyHierarchy; import org.dllearner.core.owl.DatatypeSomeRestriction; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.DoubleMaxValue; @@ -60,6 +61,7 @@ import org.dllearner.core.owl.ObjectMinCardinalityRestriction; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.core.owl.ObjectPropertyExpression; +import org.dllearner.core.owl.ObjectPropertyHierarchy; import org.dllearner.core.owl.ObjectQuantorRestriction; import org.dllearner.core.owl.ObjectSomeRestriction; import org.dllearner.core.owl.ObjectValueRestriction; @@ -98,6 +100,8 @@ // hierarchies private ClassHierarchy subHierarchy; + private ObjectPropertyHierarchy objectPropertyHierarchy; + private DatatypePropertyHierarchy dataPropertyHierarchy; // domains and ranges private Map<ObjectProperty,Description> opDomains = new TreeMap<ObjectProperty,Description>(); @@ -262,6 +266,7 @@ // subHierarchy = rs.getClassHierarchy(); public void init() { // System.out.println("subHierarchy: " + subHierarchy); +// System.out.println("object properties: " + ); // query reasoner for domains and ranges // (because they are used often in the operator) @@ -560,7 +565,9 @@ // rule 2: EXISTS r.D => EXISTS s.D or EXISTS r^-1.D => EXISTS s^-1.D // currently inverse roles are not supported ObjectProperty ar = (ObjectProperty) role; - Set<ObjectProperty> moreSpecialRoles = reasoner.getSubProperties(ar); + // remove reasoner calls +// Set<ObjectProperty> moreSpecialRoles = reasoner.getSubProperties(ar); + Set<ObjectProperty> moreSpecialRoles = objectPropertyHierarchy.getMoreSpecialRoles(ar); for(ObjectProperty moreSpecialRole : moreSpecialRoles) refinements.add(new ObjectSomeRestriction(moreSpecialRole, description.getChild(0))); @@ -604,7 +611,8 @@ // 
rule 3: ALL r.D => ALL s.D or ALL r^-1.D => ALL s^-1.D // currently inverse roles are not supported ObjectProperty ar = (ObjectProperty) role; - Set<ObjectProperty> moreSpecialRoles = reasoner.getSubProperties(ar); +// Set<ObjectProperty> moreSpecialRoles = reasoner.getSubProperties(ar); + Set<ObjectProperty> moreSpecialRoles = objectPropertyHierarchy.getMoreSpecialRoles(ar); for(ObjectProperty moreSpecialRole : moreSpecialRoles) { refinements.add(new ObjectAllRestriction(moreSpecialRole, description.getChild(0))); } @@ -961,9 +969,9 @@ topComputationTimeNs += System.nanoTime() - topComputationTimeStartNs; // if(domain == null) { -// System.out.println("computed top refinements: " + topRefinementsCumulative.get(maxLength)); +// System.out.println("computed top refinements up to length " + topRefinementsLength + ": " + topRefinementsCumulative.get(maxLength)); // } else { -// System.out.println("computed top refinements: " + topARefinementsCumulative.get(domain).get(maxLength)); +// System.out.println("computed top refinements up to length " + topARefinementsLength + ": (domain: "+domain+"): " + topARefinementsCumulative.get(domain).get(maxLength)); // } } @@ -1002,6 +1010,7 @@ SortedSet<Description> m3 = new TreeSet<Description>(conceptComparator); if(useExistsConstructor) { // only uses most general roles +// System.out.println("EXISTS: " + reasoner.getMostGeneralProperties()); for(ObjectProperty r : reasoner.getMostGeneralProperties()) { m3.add(new ObjectSomeRestriction(r, new Thing())); } @@ -1674,4 +1683,20 @@ public void setCardinalityLimit(int cardinalityLimit) { this.cardinalityLimit = cardinalityLimit; } + + public ObjectPropertyHierarchy getObjectPropertyHierarchy() { + return objectPropertyHierarchy; + } + + public void setObjectPropertyHierarchy(ObjectPropertyHierarchy objectPropertyHierarchy) { + this.objectPropertyHierarchy = objectPropertyHierarchy; + } + + public DatatypePropertyHierarchy getDataPropertyHierarchy() { + return dataPropertyHierarchy; + } + + public void setDataPropertyHierarchy(DatatypePropertyHierarchy dataPropertyHierarchy) { + this.dataPropertyHierarchy = dataPropertyHierarchy; + } } \ No newline at end of file Modified: trunk/examples/family/grandfather.conf =================================================================== --- trunk/examples/family/grandfather.conf 2011-11-29 10:19:02 UTC (rev 3448) +++ trunk/examples/family/grandfather.conf 2011-11-29 18:15:18 UTC (rev 3449) @@ -40,12 +40,13 @@ op.reasoner = reasoner // create a heuristic and configure it -h.type = "multiheuristic" -h.expansionPenaltyFactor = 0.2 +// h.type = "multiheuristic" +// h.expansionPenaltyFactor = 0.2 // create learning algorithm to run alg.type = "ocel" alg.reasoner = reasoner alg.operator = op -alg.heuristic = h -alg.maxExecutionTimeInSeconds = 15 +// alg.heuristic = h +alg.maxExecutionTimeInSeconds = 5 + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
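For reference, the "manual heuristic" case that triggered the bug is the kind of configuration grandfather.conf contained before this change (and now shows commented out). With the fix, a heuristic injected as a component works again: OCEL fills in the example counts itself when they are left at their defaults, via the new setters on MultiHeuristic. A conf sketch of that wiring, taken from the previous revision of grandfather.conf:

    // create a heuristic and configure it
    h.type = "multiheuristic"
    h.expansionPenaltyFactor = 0.2

    // create learning algorithm to run
    alg.type = "ocel"
    alg.reasoner = reasoner
    alg.operator = op
    alg.heuristic = h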
From: <jen...@us...> - 2011-11-30 11:50:40
Revision: 3450 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3450&view=rev Author: jenslehmann Date: 2011-11-30 11:50:32 +0000 (Wed, 30 Nov 2011) Log Message: ----------- annotated config options in CELOE Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/interfaces/doc/configOptions.html Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-11-29 18:15:18 UTC (rev 3449) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-11-30 11:50:32 UTC (rev 3450) @@ -30,8 +30,6 @@ import java.util.SortedSet; import java.util.TreeSet; -import javax.sound.midi.SysexMessage; - import org.apache.log4j.Logger; import org.dllearner.core.AbstractCELA; import org.dllearner.core.AbstractLearningProblem; @@ -39,18 +37,12 @@ import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.EvaluatedDescription; -import org.dllearner.core.options.BooleanConfigOption; -import org.dllearner.core.options.CommonConfigOptions; -import org.dllearner.core.options.ConfigOption; -import org.dllearner.core.options.DoubleConfigOption; -import org.dllearner.core.options.StringConfigOption; +import org.dllearner.core.config.ConfigOption; import org.dllearner.core.owl.ClassHierarchy; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; import org.dllearner.core.owl.Intersection; import org.dllearner.core.owl.NamedClass; -import org.dllearner.core.owl.ObjectProperty; -import org.dllearner.core.owl.ObjectSomeRestriction; import org.dllearner.core.owl.Restriction; import org.dllearner.core.owl.Thing; import org.dllearner.learningproblems.ClassLearningProblem; @@ -94,6 +86,7 @@ private RefinementOperator operator; private DescriptionMinimizer minimizer; + @ConfigOption(name="useMinimizer", defaultValue="true", description="Specifies whether returned expressions should be minimised by removing those parts, which are not needed. (Basically the minimiser tries to find the shortest expression which is equivalent to the learned expression). Turning this feature off may improve performance.") private boolean useMinimizer = true; // all nodes in the search tree (used for selecting most promising node) @@ -102,6 +95,7 @@ // root of search tree private OENode startNode; // the class with which we start the refinement process + @ConfigOption(name = "startClass", defaultValue="owl:Thing", description="You can specify a start class for the algorithm. 
To do this, you have to use Manchester OWL syntax without using prefixes.") private Description startClass; // all descriptions in the search tree plus those which were too weak (for fast redundancy check) @@ -111,6 +105,7 @@ // if true, then each solution is evaluated exactly instead of approximately // private boolean exactBestDescriptionEvaluation = false; + @ConfigOption(name = "singleSuggestionMode", defaultValue="false", description="Use this if you are interested in only one suggestion and your learning problem has many (more than 1000) examples.") private boolean singleSuggestionMode; private Description bestDescription; private double bestAccuracy = Double.MIN_VALUE; @@ -156,27 +151,37 @@ Set<NamedClass> allowedConcepts = null; Set<NamedClass> ignoredConcepts = null; + @ConfigOption(name = "writeSearchTree", defaultValue="false", description="specifies whether to write a search tree") private boolean writeSearchTree = false; + @ConfigOption(name = "searchTreeFile", defaultValue="log/searchTree.txt", description="file to use for the search tree") private String searchTreeFile = "log/searchTree.txt"; + @ConfigOption(name = "replaceSearchTree", defaultValue="false", description="specifies whether to replace the search tree in the log file after each run or append the new search tree") + private boolean replaceSearchTree = false; + + @ConfigOption(name = "maxNrOfResults", defaultValue="10", description="Sets the maximum number of results one is interested in. (Setting this to a lower value may increase performance as the learning algorithm has to store/evaluate/beautify less descriptions).") private int maxNrOfResults = 10; + @ConfigOption(name = "noisePercentage", defaultValue="0.0", description="the (approximated) percentage of noise within the examples") private double noisePercentage = 0.0; + @ConfigOption(name = "filterDescriptionsFollowingFromKB", defaultValue="false", description="If true, then the results will not contain suggestions, which already follow logically from the knowledge base. Be careful, since this requires a potentially expensive consistency check for candidate solutions.") private boolean filterDescriptionsFollowingFromKB = false; + @ConfigOption(name = "reuseExistingDescription", defaultValue="false", description="If true, the algorithm tries to find a good starting point close to an existing definition/super class of the given class in the knowledge base.") private boolean reuseExistingDescription = false; - private boolean replaceSearchTree = false; - + @ConfigOption(name = "maxClassDescriptionTests", defaultValue="0", description="The maximum number of candidate hypothesis the algorithm is allowed to test (0 = no limit). The algorithm will stop afterwards. 
(The real number of tests can be slightly higher, because this criterion usually won't be checked after each single test.)") private int maxClassDescriptionTests = 0; - @org.dllearner.core.config.ConfigOption(defaultValue = "10", name = "maxExecutionTimeInSeconds", description = "maximum execution of the algorithm in seconds") + @ConfigOption(defaultValue = "10", name = "maxExecutionTimeInSeconds", description = "maximum execution of the algorithm in seconds") private int maxExecutionTimeInSeconds = 10; + @ConfigOption(name = "terminateOnNoiseReached", defaultValue="false", description="specifies whether to terminate when noise criterion is met") private boolean terminateOnNoiseReached = false; + @ConfigOption(name = "maxDepth", defaultValue="7", description="maximum depth of description") private double maxDepth = 7; // public CELOEConfigurator getConfigurator() { @@ -192,44 +197,12 @@ // configurator = new CELOEConfigurator(this); } - public static Collection<Class<? extends AbstractLearningProblem>> supportedLearningProblems() { - Collection<Class<? extends AbstractLearningProblem>> problems = new LinkedList<Class<? extends AbstractLearningProblem>>(); - problems.add(AbstractLearningProblem.class); - return problems; - } +// public static Collection<Class<? extends AbstractLearningProblem>> supportedLearningProblems() { +// Collection<Class<? extends AbstractLearningProblem>> problems = new LinkedList<Class<? extends AbstractLearningProblem>>(); +// problems.add(AbstractLearningProblem.class); +// return problems; +// } - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(CommonConfigOptions.useAllConstructor()); - options.add(CommonConfigOptions.useExistsConstructor()); - options.add(CommonConfigOptions.useHasValueConstructor()); - options.add(CommonConfigOptions.useDataHasValueConstructor()); - options.add(CommonConfigOptions.valueFreqencyThreshold()); - options.add(CommonConfigOptions.useCardinalityRestrictions()); - options.add(CommonConfigOptions.cardinalityLimit()); - // by default, we do not use negation (should be configurable in GUI) - options.add(CommonConfigOptions.useNegation(false)); - options.add(CommonConfigOptions.useBooleanDatatypes()); - options.add(CommonConfigOptions.useDoubleDatatypes()); - options.add(CommonConfigOptions.maxExecutionTimeInSeconds(10)); - options.add(CommonConfigOptions.getNoisePercentage()); - options.add(CommonConfigOptions.getTerminateOnNoiseReached(false)); - options.add(CommonConfigOptions.getMaxDepth(7)); - options.add(CommonConfigOptions.maxNrOfResults(10)); - options.add(CommonConfigOptions.maxClassDescriptionTests()); - options.add(new BooleanConfigOption("singleSuggestionMode", "Use this if you are interested in only one suggestion and your learning problem has many (more than 1000) examples.", false)); - options.add(CommonConfigOptions.getInstanceBasedDisjoints()); - options.add(new BooleanConfigOption("filterDescriptionsFollowingFromKB", "If true, then the results will not contain suggestions, which already follow logically from the knowledge base. 
Be careful, since this requires a potentially expensive consistency check for candidate solutions.", false)); - options.add(new BooleanConfigOption("reuseExistingDescription", "If true, the algorithm tries to find a good starting point close to an existing definition/super class of the given class in the knowledge base.", false)); - options.add(new BooleanConfigOption("writeSearchTree", "specifies whether to write a search tree", false)); - options.add(new StringConfigOption("searchTreeFile","file to use for the search tree", "log/searchTree.txt")); - options.add(new BooleanConfigOption("replaceSearchTree","specifies whether to replace the search tree in the log file after each run or append the new search tree", false)); - options.add(new DoubleConfigOption("expansionPenaltyFactor","heuristic penalty per syntactic construct used (lower = finds more complex expression, but might miss simple ones)", 0.1)); - options.add(CommonConfigOptions.allowedConcepts()); - options.add(CommonConfigOptions.ignoredConcepts()); - return options; - } - public static String getName() { return "CELOE"; } Modified: trunk/interfaces/doc/configOptions.html =================================================================== --- trunk/interfaces/doc/configOptions.html 2011-11-29 18:15:18 UTC (rev 3449) +++ trunk/interfaces/doc/configOptions.html 2011-11-30 11:50:32 UTC (rev 3450) @@ -21,7 +21,10 @@ <div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><li><a href="#org.dllearner.algorithms.BruteForceLearner">Brute Force Learner</a></li></div> <div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><li><a href="#org.dllearner.algorithms.celoe.CELOE">CELOE</a></li></div> <div class="LearningProblem"><li><a href="#org.dllearner.learningproblems.ClassLearningProblem">ClassLearningProblem</a></li></div> -<div class="KnowledgeSource"><li><a href="#org.dllearner.kb.KBFile">KB file</a></li></div> +<div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><li><a href="#org.dllearner.algorithms.fuzzydll.FuzzyCELOE">Fuzzy CELOE</a></li></div> +<div class="ReasonerComponent"><li><a href="#org.dllearner.reasoning.fuzzydll.FuzzyOWLAPIReasoner">Fuzzy OWL API Reasoner</a></li></div> +<div class="LearningProblem"><li><a href="#org.dllearner.learningproblems.FuzzyPosNegLPStandard">FuzzyPosNegLPStandard</a></li></div> +<div class="KnowledgeSource"><li><a href="#org.dllearner.kb.KBFile">KB File</a></li></div> <div class="ReasonerComponent"><li><a href="#org.dllearner.reasoning.OWLAPIReasoner">OWL API Reasoner</a></li></div> <div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><li><a href="#org.dllearner.algorithms.ocel.OCEL">OWL Class Expression Learner</a></li></div> <div class="KnowledgeSource"><li><a href="#org.dllearner.kb.OWLFile">OWL File</a></li></div> @@ -46,6 +49,7 @@ <div class="LearningAlgorithm AxiomLearningAlgorithm"><li><a href="#org.dllearner.algorithms.properties.ObjectPropertyDomainAxiomLearner">objectproperty domain axiom learner</a></li></div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><li><a href="#org.dllearner.algorithms.properties.ObjectPropertyRangeAxiomLearner">objectproperty range learner</a></li></div> <div class="LearningProblem"><li><a href="#org.dllearner.learningproblems.PosOnlyLP">positive only learning problem</a></li></div> +<div class="LearningAlgorithm"><li><a href="#org.dllearner.algorithm.qtl.QTL">query tree learner</a></li></div> <div class="RefinementOperator"><li><a href="#org.dllearner.refinementoperators.RhoDRDown">rho refinement 
operator</a></li></div> <div class="LearningAlgorithm AxiomLearningAlgorithm ClassExpressionLearningAlgorithm"><li><a href="#org.dllearner.algorithms.SimpleSubclassLearner">simple subclass learner</a></li></div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><li><a href="#org.dllearner.algorithms.properties.SymmetricObjectPropertyAxiomLearner">symmetric objectproperty axiom learner</a></li></div> @@ -54,12 +58,35 @@ <div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><a name="org.dllearner.algorithms.BruteForceLearner"><h2>Brute Force Learner</h2></a> <p>short name: bruteForce<br />version: 0.8<br />implements: LearningAlgorithm, ClassExpressionLearningAlgorithm<br /></p>This component does not have configuration options.</div> <div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><a name="org.dllearner.algorithms.celoe.CELOE"><h2>CELOE</h2></a> -<p>short name: celoe<br />version: 1.0<br />implements: LearningAlgorithm, ClassExpressionLearningAlgorithm<br />description: CELOE is an adapted and extended version of the OCEL algorithm applied for the ontology engineering use case. See http://jens-lehmann.org/files/2011/celoe.pdf for reference.<br /></p>This component does not have configuration options.</div> +<p>short name: celoe<br />version: 1.0<br />implements: LearningAlgorithm, ClassExpressionLearningAlgorithm<br />description: CELOE is an adapted and extended version of the OCEL algorithm applied for the ontology engineering use case. See http://jens-lehmann.org/files/2011/celoe.pdf for reference.<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> +<tr><td>useMinimizer</td><td>Specifies whether returned expressions should be minimised by removing those parts, which are not needed. (Basically the minimiser tries to find the shortest expression which is equivalent to the learned expression). Turning this feature off may improve performance.</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>terminateOnNoiseReached</td><td>specifies whether to terminate when noise criterion is met</td><td> boolean</td><td>false</td><td> false</td></tr> +<tr><td>reuseExistingDescription</td><td>If true, the algorithm tries to find a good starting point close to an existing definition/super class of the given class in the knowledge base.</td><td> boolean</td><td>false</td><td> false</td></tr> +<tr><td>filterDescriptionsFollowingFromKB</td><td>If true, then the results will not contain suggestions, which already follow logically from the knowledge base. Be careful, since this requires a potentially expensive consistency check for candidate solutions.</td><td> boolean</td><td>false</td><td> false</td></tr> +<tr><td>maxClassDescriptionTests</td><td>The maximum number of candidate hypothesis the algorithm is allowed to test (0 = no limit). The algorithm will stop afterwards. 
(The real number of tests can be slightly higher, because this criterion usually won't be checked after each single test.)</td><td> int</td><td>0</td><td> false</td></tr> +<tr><td>maxDepth</td><td>maximum depth of description</td><td> double</td><td>7</td><td> false</td></tr> +<tr><td>singleSuggestionMode</td><td>Use this if you are interested in only one suggestion and your learning problem has many (more than 1000) examples.</td><td> boolean</td><td>false</td><td> false</td></tr> +<tr><td>noisePercentage</td><td>the (approximated) percentage of noise within the examples</td><td> double</td><td>0.0</td><td> false</td></tr> +<tr><td>maxExecutionTimeInSeconds</td><td>maximum execution of the algorithm in seconds</td><td> int</td><td>10</td><td> false</td></tr> +<tr><td>writeSearchTree</td><td>specifies whether to write a search tree</td><td> boolean</td><td>false</td><td> false</td></tr> +<tr><td>startClass</td><td>You can specify a start class for the algorithm. To do this, you have to use Manchester OWL syntax without using prefixes.</td><td> Description</td><td>owl:Thing</td><td> false</td></tr> +<tr><td>maxNrOfResults</td><td>Sets the maximum number of results one is interested in. (Setting this to a lower value may increase performance as the learning algorithm has to store/evaluate/beautify less descriptions).</td><td> int</td><td>10</td><td> false</td></tr> +<tr><td>searchTreeFile</td><td>file to use for the search tree</td><td> String</td><td>log/searchTree.txt</td><td> false</td></tr> +<tr><td>replaceSearchTree</td><td>specifies whether to replace the search tree in the log file after each run or append the new search tree</td><td> boolean</td><td>false</td><td> false</td></tr> +</tbody></table> +</div> <div class="LearningProblem"><a name="org.dllearner.learningproblems.ClassLearningProblem"><h2>ClassLearningProblem</h2></a> <p>short name: clp<br />version: 0.6<br />implements: LearningProblem<br /></p>This component does not have configuration options.</div> -<div class="KnowledgeSource"><a name="org.dllearner.kb.KBFile"><h2>KB file</h2></a> +<div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><a name="org.dllearner.algorithms.fuzzydll.FuzzyCELOE"><h2>Fuzzy CELOE</h2></a> +<p>short name: fceloe<br />version: 0.2<br />implements: LearningAlgorithm, ClassExpressionLearningAlgorithm<br />description: See Fuzzy DL-Learner paper published at ISDA 2011.<br /></p>This component does not have configuration options.</div> +<div class="ReasonerComponent"><a name="org.dllearner.reasoning.fuzzydll.FuzzyOWLAPIReasoner"><h2>Fuzzy OWL API Reasoner</h2></a> +<p>short name: foar<br />version: 0.2<br />implements: ReasonerComponent<br /></p>This component does not have configuration options.</div> +<div class="LearningProblem"><a name="org.dllearner.learningproblems.FuzzyPosNegLPStandard"><h2>FuzzyPosNegLPStandard</h2></a> +<p>short name: fuzzyPosNeg<br />version: 0.2<br />implements: LearningProblem<br /></p>This component does not have configuration options.</div> +<div class="KnowledgeSource"><a name="org.dllearner.kb.KBFile"><h2>KB File</h2></a> <p>short name: kbfile<br />version: 0.8<br />implements: KnowledgeSource<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>url</td><td>URL pointer to the KB file</td><td> String</td><td></td><td> false</td></tr> +<tr><td>fileName</td><td>relative or absolute path to KB file</td><td> String</td><td></td><td> 
false</td></tr> </tbody></table> </div> <div class="ReasonerComponent"><a name="org.dllearner.reasoning.OWLAPIReasoner"><h2>OWL API Reasoner</h2></a> @@ -75,17 +102,17 @@ <div class="LearningProblem"><a name="org.dllearner.learningproblems.PosNegLPStandard"><h2>PosNegLPStandard</h2></a> <p>short name: posNegStandard<br />version: 0.8<br />implements: LearningProblem<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>accuracyMethod</td><td>Specifies, which method/function to use for computing accuracy.</td><td> String</td><td>predacc</td><td> false</td></tr> +<tr><td>useApproximations</td><td>Use Approximations</td><td> boolean</td><td>false</td><td> false</td></tr> <tr><td>approxDelta</td><td>The Approximate Delta</td><td> double</td><td>0.05</td><td> false</td></tr> -<tr><td>useApproximations</td><td>Use Approximations</td><td> boolean</td><td>false</td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm ClassExpressionLearningAlgorithm"><a name="org.dllearner.algorithms.RandomGuesser"><h2>Random Guesser</h2></a> <p>short name: randomGuesser<br />version: 0.8<br />implements: LearningAlgorithm, ClassExpressionLearningAlgorithm<br /></p>This component does not have configuration options.</div> <div class="KnowledgeSource"><a name="org.dllearner.kb.SparqlEndpointKS"><h2>SPARQL endpoint</h2></a> <p>short name: sparql<br />version: 0.2<br />implements: KnowledgeSource<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> +<tr><td>defaultGraphs</td><td>no description available</td><td> List</td><td>[]</td><td> false</td></tr> <tr><td>namedGraphs</td><td>no description available</td><td> List</td><td>[]</td><td> false</td></tr> <tr><td>url</td><td>no description available</td><td> URL</td><td></td><td> true</td></tr> -<tr><td>defaultGraphs</td><td>no description available</td><td> List</td><td>[]</td><td> false</td></tr> </tbody></table> </div> <div class="KnowledgeSource"><a name="org.dllearner.kb.sparql.SparqlKnowledgeSource"><h2>SPARQL endpoint fragment</h2></a> @@ -93,49 +120,41 @@ <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.SubDataPropertyOfAxiomLearner"><h2>data subPropertyOf axiom learner</h2></a> <p>short name: dplsubprop<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> DatatypeProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.DataPropertyDomainAxiomLearner"><h2>dataproperty domain axiom learner</h2></a> <p>short name: dpldomain<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> DatatypeProperty</td><td></td><td> false</td></tr> 
-<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.DataPropertyRangeAxiomLearner"><h2>dataproperty range learner</h2></a> <p>short name: dblrange<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> DatatypeProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm ClassExpressionLearningAlgorithm"><a name="org.dllearner.algorithms.DisjointClassesLearner"><h2>disjoint classes learner</h2></a> <p>short name: cldisjoint<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm, ClassExpressionLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> <tr><td>classToDescribe</td><td></td><td> NamedClass</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.DisjointDataPropertyAxiomLearner"><h2>disjoint dataproperty axiom learner</h2></a> <p>short name: dpldisjoint<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> DatatypeProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.DisjointObjectPropertyAxiomLearner"><h2>disjoint objectproperty axiom learner</h2></a> <p>short name: opldisjoint<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.EquivalentDataPropertyAxiomLearner"><h2>equivalent dataproperty axiom learner</h2></a> <p>short name: dplequiv<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default 
value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> DatatypeProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.EquivalentObjectPropertyAxiomLearner"><h2>equivalent objectproperty axiom learner</h2></a> <p>short name: oplequiv<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="ReasonerComponent"><a name="org.dllearner.reasoning.FastInstanceChecker"><h2>fast instance checker</h2></a> @@ -147,84 +166,77 @@ <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.FunctionalDataPropertyAxiomLearner"><h2>functional dataproperty axiom learner</h2></a> <p>short name: dplfunc<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> DatatypeProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.FunctionalObjectPropertyAxiomLearner"><h2>functional objectproperty axiom learner</h2></a> <p>short name: oplfunc<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.InverseFunctionalObjectPropertyAxiomLearner"><h2>inversefunctional objectproperty axiom learner</h2></a> <p>short name: oplinvfunc<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="OtherComponent"><a name="org.dllearner.algorithms.ocel.MultiHeuristic"><h2>multiple criteria heuristic</h2></a> <p>short name: 
multiheuristic<br />version: 0.7<br />implements: OtherComponent<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> +<tr><td>negationPenalty</td><td>no description available</td><td> int</td><td>0</td><td> false</td></tr> +<tr><td>expansionPenaltyFactor</td><td>no description available</td><td> double</td><td>0.02</td><td> false</td></tr> <tr><td>negativeWeight</td><td>no description available</td><td> double</td><td>1.0</td><td> false</td></tr> <tr><td>gainBonusFactor</td><td>no description available</td><td> double</td><td>0.5</td><td> false</td></tr> +<tr><td>nodeChildPenalty</td><td>no description available</td><td> double</td><td>0.0001</td><td> false</td></tr> <tr><td>startNodeBonus</td><td>no description available</td><td> double</td><td>0.1</td><td> false</td></tr> -<tr><td>negationPenalty</td><td>no description available</td><td> int</td><td>0</td><td> false</td></tr> -<tr><td>nodeChildPenalty</td><td>no description available</td><td> double</td><td>0.0001</td><td> false</td></tr> -<tr><td>expansionPenaltyFactor</td><td>no description available</td><td> double</td><td>0.02</td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.SubObjectPropertyOfAxiomLearner"><h2>object subPropertyOf axiom learner</h2></a> <p>short name: oplsubprop<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.ObjectPropertyDomainAxiomLearner"><h2>objectproperty domain axiom learner</h2></a> <p>short name: opldomain<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.ObjectPropertyRangeAxiomLearner"><h2>objectproperty range learner</h2></a> <p>short name: oplrange<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningProblem"><a name="org.dllearner.learningproblems.PosOnlyLP"><h2>positive only learning problem</h2></a> <p>short 
name: posonlylp<br />version: 0.6<br />implements: LearningProblem<br /></p>This component does not have configuration options.</div> +<div class="LearningAlgorithm"><a name="org.dllearner.algorithm.qtl.QTL"><h2>query tree learner</h2></a> +<p>short name: qtl<br />version: 0.8<br />implements: LearningAlgorithm<br /></p>This component does not have configuration options.</div> <div class="RefinementOperator"><a name="org.dllearner.refinementoperators.RhoDRDown"><h2>rho refinement operator</h2></a> <p>short name: rho<br />version: 0.8<br />implements: RefinementOperator<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> -<tr><td>instanceBasedDisjoints</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> -<tr><td>useAllConstructor</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> -<tr><td>disjointChecks</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>applyAllFilter</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>useNegation</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> <tr><td>applyExistsFilter</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>useCardinalityRestrictions</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>useStringDatatypes</td><td>no description available</td><td> boolean</td><td>false</td><td> false</td></tr> <tr><td>useHasValueConstructor</td><td>no description available</td><td> boolean</td><td>false</td><td> false</td></tr> -<tr><td>applyAllFilter</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> -<tr><td>useStringDatatypes</td><td>no description available</td><td> boolean</td><td>false</td><td> false</td></tr> <tr><td>useBooleanDatatypes</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> <tr><td>useDoubleDatatypes</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>instanceBasedDisjoints</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>useAllConstructor</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>dropDisjuncts</td><td>no description available</td><td> boolean</td><td>false</td><td> false</td></tr> <tr><td>useExistsConstructor</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> -<tr><td>dropDisjuncts</td><td>no description available</td><td> boolean</td><td>false</td><td> false</td></tr> -<tr><td>useNegation</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> -<tr><td>useCardinalityRestrictions</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> +<tr><td>disjointChecks</td><td>no description available</td><td> boolean</td><td>true</td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm ClassExpressionLearningAlgorithm"><a name="org.dllearner.algorithms.SimpleSubclassLearner"><h2>simple subclass learner</h2></a> <p>short name: clsub<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm, ClassExpressionLearningAlgorithm<br /></p><table 
id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>classToDescribe</td><td></td><td> NamedClass</td><td></td><td> true</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.SymmetricObjectPropertyAxiomLearner"><h2>symmetric objectproperty axiom learner</h2></a> <p>short name: oplsymm<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> <div class="LearningAlgorithm AxiomLearningAlgorithm"><a name="org.dllearner.algorithms.properties.TransitiveObjectPropertyAxiomLearner"><h2>transitive objectproperty axiom learner</h2></a> <p>short name: opltrans<br />version: 0.1<br />implements: LearningAlgorithm, AxiomLearningAlgorithm<br /></p><table id="hor-minimalist-a"><thead><tr><th>option name</th><th>description</th><th>type</th><th>default value</th><th>required?</th></tr></thead><tbody> <tr><td>propertyToDescribe</td><td></td><td> ObjectProperty</td><td></td><td> false</td></tr> -<tr><td>maxFetchedRows</td><td>The maximum number of rows fetched from the endpoint to approximate the result.</td><td> int</td><td></td><td> false</td></tr> </tbody></table> </div> </body></html> \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-11-30 17:01:18
|
Revision: 3453 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3453&view=rev Author: jenslehmann Date: 2011-11-30 17:01:10 +0000 (Wed, 30 Nov 2011) Log Message: ----------- updated manual Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java trunk/examples/datatypes/stringtyped.conf trunk/interfaces/doc/manual/bibliography.bib trunk/interfaces/doc/manual/manual.tex Modified: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-11-30 16:09:23 UTC (rev 3452) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-11-30 17:01:10 UTC (rev 3453) @@ -353,7 +353,7 @@ valueFrequency = null; dataValueFrequency = null; - System.out.println("freqDataValues: " + frequentDataValues); +// System.out.println("freqDataValues: " + frequentDataValues); // compute splits for double datatype properties for(DatatypeProperty dp : reasoner.getDoubleDatatypeProperties()) { Modified: trunk/examples/datatypes/stringtyped.conf =================================================================== --- trunk/examples/datatypes/stringtyped.conf 2011-11-30 16:09:23 UTC (rev 3452) +++ trunk/examples/datatypes/stringtyped.conf 2011-11-30 17:01:10 UTC (rev 3453) @@ -29,8 +29,9 @@ op.type = "rho" op.useDataHasValueConstructor=true +op.useStringDatatypes = true alg.type = "ocel" alg.searchTreeFile = "log/stringTypedTree.txt" -alg.writeSearchTree = true +// alg.writeSearchTree = true Modified: trunk/interfaces/doc/manual/bibliography.bib =================================================================== --- trunk/interfaces/doc/manual/bibliography.bib 2011-11-30 16:09:23 UTC (rev 3452) +++ trunk/interfaces/doc/manual/bibliography.bib 2011-11-30 17:01:10 UTC (rev 3453) @@ -107,3 +107,69 @@ volume = "4894", pages = "161--174" } + +@ARTICLE{auer-swj-2010, + author = {Sören Auer and Jens Lehmann}, + title = {Making the Web a Data Washing Machine - Creating Knowledge out of + Interlinked Data}, + journal = {Semantic Web Journal}, + year = {2010}, + keywords = {2010 group_aksw auer lehmann mole event_swj sys:relevantFor:infai + sys:relevantFor:bis seebiproject_OntoWiki peer-reviewed ontowiki_eu}, + timestamp = {2010.01.17}, + url = {http://www.jens-lehmann.org/files/2010/washing_machine_swj.pdf} +} + +@INCOLLECTION{sh_scalability_2011, + author = {Sebastian Hellmann and Jens Lehmann and Sören Auer}, + title = {Learning of OWL Class Expressions on Very Large Knowledge Bases and + its Applications.}, + booktitle = {Learning of OWL Class Expressions on Very Large Knowledge Bases and + its Applications}, + publisher = {IGI Global}, + year = {2011}, + editor = {Semantic Services, Interoperability and Web Applications: Emerging + Concepts}, + chapter = {5}, + pages = {104-130}, + doi = {doi:10.4018/978-1-60960-593-3}, + keywords = {peer-reviewed 2011 hellmann lehmann auer group_aksw mole sys:relevantFor:infai + sys:relevantFor:bis}, + owner = {sebastian}, + timestamp = {2011.06.27} +} + +@INPROCEEDINGS{hanne, + author = {Sebastian Hellmann and Jörg Unbehauen and Jens Lehmann}, + title = {HANNE - A Holistic Application for Navigational Knowledge Engineering}, + booktitle = {Posters and Demos of ISWC 2010}, + year = {2010}, + keywords = {2010 group_aksw event_iswc hellmann unbehauen lehmann mole sys:relevantFor:infai + sys:relevantFor:bis 
dllearner}, + url = {http://iswc2010.semanticweb.org/pdf/522.pdf} +} + +@INPROCEEDINGS{fuzzy, + author = {Josué Iglesias and Jens Lehmann}, + title = {Towards Integrating Fuzzy Logic Capabilities into an Ontology-based + Inductive Logic Programming Framework}, + booktitle = {Proc. of the 11th International Conference on Intelligent Systems + Design and Applications (ISDA)}, + year = {2011}, + keywords = {2011 group_aksw lehmann mole sys:relevantFor:infai sys:relevantFor:bis + sys:relevantFor:lod2 lod2page dllearner}, + owner = {jl}, + timestamp = {2011.08.23} +} + +@INPROCEEDINGS{autosparql, + author = {Jens Lehmann and Lorenz Bühmann}, + title = {AutoSPARQL: Let Users Query Your Knowledge Base}, + booktitle = {Proceedings of ESWC 2011}, + year = {2011}, + keywords = {2011 group_aksw mole event_eswc lehmann buehmann sys:relevantFor:infai + dllearner sys:relevantFor:bis sys:relevantFor:lod2 lod2page peer-reviewed}, + owner = {jl}, + timestamp = {2011.03.22}, + url = {http://jens-lehmann.org/files/2011/autosparql_eswc.pdf} +} \ No newline at end of file Modified: trunk/interfaces/doc/manual/manual.tex =================================================================== --- trunk/interfaces/doc/manual/manual.tex 2011-11-30 16:09:23 UTC (rev 3452) +++ trunk/interfaces/doc/manual/manual.tex 2011-11-30 17:01:10 UTC (rev 3453) @@ -38,9 +38,23 @@ \maketitle \begin{abstract} +\vspace{-15pt} DL-Learner is a machine learning framework for OWL and description logics. It includes several learning algorithms and is easy to extend. DL-Learner widens the scope of Inductive Logic Programming to description logics and the Semantic Web. This manual provides the entry point to using DL-Learner and explains its basic concepts. + +Please refer to the following publication (BibTeX) when citing DL-Learner: +\begin{verbatim} + @Article{dllearner_jmlr, + author = "Jens Lehmann", + title = "{DL-Learner:} Learning Concepts in Description Logics", + journal = "Journal of Machine Learning Research (JMLR)", + year = "2009", + volume = "10", + pages = "2639--2642" +} +\end{verbatim} \end{abstract} +\vspace{-25pt} \tableofcontents \clearpage @@ -56,44 +70,43 @@ In the most common scenario we consider, we have a background knowledge base in OWL/DLs and additionally, we are given positive and negative examples. Each example is an individual in our knowledge base. The goal is to find an OWL \emph{class expression}\footnote{\owlce} such that all/many of the positive examples are \emph{instances} of this expression and none/few of the negative examples are instances of it. The primary purpose of learning is to find a class expression, which can classify unseen individuals (i.e.~not belonging to the examples) correctly. It is also important that the obtained class expression is easy to understand for a domain expert. We call these criteria \emph{accuracy} and \emph{readability}. -As an example, consider the problem to find out whether a chemical compound can cause cancer\footnote{see \carc{} for a more detailed description}. In this case, the background knowledge contains information about chemical compounds in general and certain concrete compounds we are interested in. The positive examples are those compounds causing cancer, whereas the negative examples are those compounds not causing cancer. The prediction for the examples has been obtained from experiments and long-term research trials in this case. Of course, all examples have to be described in the considered background knowledge. 
A learning algorithm can now derive a class expression from examples and background knowledge, e.g.~such a class expression in natural language could be ``chemical compounds containing a phosphorus atom''. (Of course, in practice the expression will be more complex to obtain a reasonable accuracy.) Using this class expression, we can not classify unseen chemical compounds. +As an example, consider the problem to find out whether a chemical compound can cause cancer\footnote{see \carc{} for a more detailed description}. In this case, the background knowledge contains information about chemical compounds in general and certain concrete compounds we are interested in. The positive examples are those compounds causing cancer, whereas the negative examples are those compounds not causing cancer. The prediction for the examples has been obtained from experiments and long-term research trials in this case. Of course, all examples have to be described in the considered background knowledge. A learning algorithm can now derive a class expression from examples and background knowledge, e.g.~such a class expression in natural language could be ``chemical compounds containing a phosphorus atom''. (Of course, in practice the expression will be more complex to obtain a reasonable accuracy.) Using this class expression, we can now classify unseen chemical compounds. +Please note that the latest versions of DL-Learner are not limited to OWL class expressions anymore. There is also preliminary support for learning simple SPARQL queries~\cite{autosparql}. Preliminary support for fuzzy OWL class expressions~\cite{fuzzy} is also included, but requires setting up a fuzzy OWL reasoner. Please contact us via the DL-Learner discussion list if you plan to do this. + \section{Getting Started} \label{sec:start} -DL-Learner is written in Java, i.e.~it can be used on almost all platforms. Currently, Java 6 or higher is required. To install the latest release, please visit the download page\footnote{\dldownload} and extract the file on your harddisk. In the top level directory, you will notice several executables. Those files ending with \verb|bat| are Windows executables, whereas the corresponding files without file extension are the Non-Windows (e.g.~Linux, Mac) executables. To test whether DL-Learner works, please run the following on the command line depending on your operating system: +DL-Learner is written in Java, i.e.~it can be used on almost all platforms. Currently, Java 6 or higher is required. To install the latest release, please visit the download page\footnote{\dldownload} and extract the file on your harddisk. In the \verb|bin| directory, you will notice several executables. Those files ending with \verb|bat| are Windows executables, whereas the corresponding files without file extension are the Non-Windows (e.g.~Linux, Mac) executables. To test whether DL-Learner works, please run the following on the command line depending on your operating system: \begin{verbatim} -dllearner examples/father.conf (Non-Windows Operating System) -dllearner.bat examples/father.conf (Windows Operating System) +./dllearner ../examples/father.conf (Non-Windows Operating System) +dllearner.bat ..\examples\father.conf (Windows Operating System) \end{verbatim} -\emph{Conf files}, e.g. \verb|examples/father.conf| in this case, describe the learning problem and specify which algorithm you want to use to solve it. 
In the simplest case they just say where to find the background knowledge to use (in the OWL file \verb|examples/father.owl| in this case) and the positive and negative examples (marked by ``+'' and ``-'', respectively). When running the above command, you should get something similar to the following: +\emph{Conf files}, e.g. \verb|examples/father.conf| in this case, describe the learning problem and specify which algorithm you want to use to solve it. In the simplest case they just say where to find the background knowledge to use (in the OWL file \verb|examples/father.owl| in this case) and the positive and negative examples. When running the above command, you should get something similar to the following: \begin{verbatim} -DL-Learner 2010-08-07 command line interface -starting component manager ... OK (157ms) -initialising component "OWL file" ... OK (0ms) -initialising component "fast instance checker" ... OK (842ms) -initialising component "pos neg learning problem" ... OK (0ms) -initialising component "refinement operator based - learning algorithm II" ... OK (14ms) +DL-Learner command line interface +Initializing Component "OWL File"... OK (0ms) +Initializing Component "fast instance checker"... OK (835ms) +Initializing Component "PosNegLPStandard"... OK (0ms) +Initializing Component "OWL Class Expression Learner"... OK (21ms) starting top down refinement with: Thing (50% accuracy) more accurate (83,33%) class expression found: male solutions (at most 20 are shown): 1: (male and hasChild some Thing) (accuracy 100%, length 5, depth 3) -Algorithm terminated successfully. +Algorithm terminated successfully (2236 descriptions tested). number of retrievals: 4 -retrieval reasoning time: 0ms (0ms per retrieval) -number of instance checks: 93 (0 multiple) -instance check reasoning time: 1ms ( 0ms per instance check) -overall reasoning time: 1ms (11,016% of overall runtime) -overall algorithm runtime: 17ms +retrieval reasoning time: 0ms ( 0ms per retrieval) +number of instance checks: 7455 (0 multiple) +instance check reasoning time: 54ms ( 0ms per instance check) +overall reasoning time: 54ms \end{verbatim} -The first part of the output tells you which components are used (more on this in Section \ref{sec:components}). In the second part you see output coming from the used learning algorithm, i.e.~it can print information while running (``more accurate (83,33\%) class description found'') and the final solutions, it computed. The results are displayed in Manchester OWL Syntax\footnote{\mos}. There can be several solutions, in which case they are ordered with the most promising one in the first position. In this case the only solution is \verb|male and hasChild some Thing| defining the class father. The last part of the output contains some runtime statistics. +The first part of the output tells you which components are used (more on this in Section \ref{sec:components}). In the second part you see output coming from the used learning algorithm, i.e.~it can print information while running (``more accurate (83,33\%) class expression found'') and the final solutions, it computed. The results are displayed in Manchester OWL Syntax\footnote{\mos}. There can be several solutions, in which case they are ordered with the most promising one in the first position. In this case the only solution is \verb|male and hasChild some Thing| defining the class father. The last part of the output contains some runtime statistics. 
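To make the role of the conf file more concrete, the following is a minimal sketch of the kind of configuration \verb|examples/father.conf| contains, assembled from the conf syntax shown later in this manual; the individual URIs, the bean names and the reasoner wiring are illustrative assumptions rather than the exact contents of the shipped file:
\begin{verbatim}
// knowledge source: the OWL file containing the background knowledge
ks.type = "OWL File"
ks.fileName = "father.owl"

// reasoner used for instance checks (the "sources" wiring is assumed)
reasoner.type = "fast instance checker"
reasoner.sources = { ks }

// learning problem: positive and negative examples (URIs are hypothetical)
lp.type = "posNegStandard"
lp.positiveExamples = { "http://example.com/father#stefan",
                        "http://example.com/father#markus" }
lp.negativeExamples = { "http://example.com/father#heinz",
                        "http://example.com/father#anna" }

// learning algorithm that produces the class expressions shown in the listing
alg.type = "ocel"
\end{verbatim}
Running the command line interface on a file of this kind yields output along the lines of the listing above.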
\section{DL-Learner Architecture} @@ -105,9 +118,9 @@ \label{fig:structure} \end{figure} -To be flexible in integrating new learning algorithms, new kinds of learning problems, new knowledge bases, and new reasoner implementations, DL-Learner uses a component based model. Adding a component can be done by subclassing the appropriate class and adding the name of the new class to the “components.ini” file (more on that in Section \ref{sec:developing}). +To be flexible in integrating new learning algorithms, new kinds of learning problems, new knowledge bases, and new reasoner implementations, DL-Learner uses a component based model. Adding a component can be done by implementing the appropriate Java interface and adding appropriate annotations (more on that in Section \ref{sec:developing}). -There are four types of components (knowledge source, reasoning service, learning problem, learning algorithm). For each type, there are several implemented components and each component can have its own configuration options as illustrated in Figure \ref{fig:components}. Configuration options can be used to change parameters/settings of a component. In Section \ref{sec:components}, we describe the components in DL-Learner and their configuration options. +There are four common types of components (knowledge source, reasoning service, learning problem, learning algorithm). DL-Learner is not restricted to those types, i.e.~others can easily be added, but we limit ourselves to those four to make this manual easier to read. For each type, there are several implemented components and each component can have its own configuration options as illustrated in Figure \ref{fig:components}. Configuration options can be used to change parameters/settings of a component. In Section \ref{sec:components}, we describe the components in DL-Learner and their configuration options. \begin{figure} \includegraphics[width=\textwidth]{components_print} @@ -118,21 +131,27 @@ \section{DL-Learner Components} \label{sec:components} -In this part, we describe concrete components currently implemented in DL-Learner. Each of the subsections contains a list of components according to the type specified in the subsection heading. Note that this does not constitute a full description, i.e.~we omit some components and many configuration options. The purpose of the manual is to obtain a general understanding of the implemented components. A full list, which is generated automatically from the source code, can be found in \verb|doc/configOptions.txt| including the default values for all options and their usage in conf files. +In this part, we describe concrete components currently implemented in DL-Learner. Each of the subsections contains a list of components according to the type specified in the subsection heading. Note that this does not constitute a full description, i.e.~we omit some components and many configuration options. The purpose of the manual is to obtain a general understanding of the implemented components. A full list, which is generated automatically from the source code, can be found in \verb|doc/configOptions.html| including the default values for all options and their usage in conf files. The file is also available online at \url{http://dl-learner.svn.sourceforge.net/viewvc/dl-learner/trunk/interfaces/doc/configOptions.html}. \subsection{Knowledge Sources} -Knowledge sources have a URI and can be included in conf files using \verb|import("$url");|, e.g.~\verb|import("ontology.owl")|. 
Depending on the file ending, DL-Learner will guess the correct type of knowledge source. If you want to overwrite this, you can use a second parameter with value \verb|OWL|, \verb|KB|, or \verb|SPARQL|, e.g.~\verb|import("ontology.owl","OWL")|. +%Knowledge sources have a URI and can be included in conf files using \verb|import("$url");|, e.g.~\verb|import("ontology.owl")|. Depending on the file ending, DL-Learner will guess the correct type of knowledge source. If you want to overwrite this, you can use a second parameter with value \verb|OWL|, \verb|KB|, or \verb|SPARQL|, e.g.~\verb|import("ontology.owl","OWL")|. +The following contains some knowledge sources implemented in DL-Learner. To give an example, this is how a local OWL file can be declared as knowlege source in a conf file: +\begin{verbatim} +ks.type = "OWL File" +ks.fileName = "father.owl" +\end{verbatim} \begin{description} \item[OWL File] DL-Learner supports OWL files in different formats, e.g. RDF/XML or N-Triples. If there is a standard OWL format, you want to use, but is not supported by DL-Learner please let us know. We use the OWL API for parsing, so all formats supported by it can be used\footnote{ for a list see \owlapi}. - \item[KB File] KB files are an internal non-standardised knowledge representation format, which corresponds to description logic syntax except that the special symbols have been replaced by ASCII strings, e.g.~\verb|AND| instead of $\sqcap$. You can find several KB files in the examples folder. The \verb|doc/kbFileSyntax.txt| contains an EBNF description of the language. + \item[KB File] KB files are an internal non-standardised knowledge representation format, which corresponds to description logic syntax except that the special symbols have been replaced by ASCII strings, e.g.~\verb|AND| instead of $\sqcap$. You can find several KB files in the examples folder. A description of the syntax is available online\footnote{\url{http://dl-learner.svn.sourceforge.net/viewvc/dl-learner/trunk/interfaces/doc/kbFileSyntax.txt}}. If in doubt, please use the standard OWL syntax formats. +%The \verb|doc/kbFileSyntax.txt| contains an EBNF description of the language. \item[SPARQL Endpoint] DL-Learner allows to use SPARQL endpoints as background knowledge source, which enables the incorporation of very large knowledge bases, e.g. DBpedia\cite{2008_dbpedia}, in DL-Learner. This works by using a set of start instances, which usually correspond to the examples in a learning problem, and then retrieving knowledge about these instances via SPARQL queries. The obtained knowledge base fragment can be converted to OWL and consumed by a reasoner later since it is now sufficiently small to be processed in reasonable time. Please see \cite{2009_ijswis} for details about the knowledge fragment extraction algorithm. Some options of the SPARQL component are: \begin{itemize} \item instances: Set of individuals to use for starting the knowledge fragment extraction. Example use in conf file: \begin{verbatim}sparql.instances = {"http://dbpedia.org/resource/Matt_Stone", - "http://dbpedia.org/resource/Sarah_Silverman"};\end{verbatim} - \item recursionDepth: Maximum distance of an extracted individual from a start individual. This influences the size of the extracted fragment and depends on the maximum property depth you want the learned class expression to have. Example use in conf file: \verb|sparql.recursionDepth = 2;|. -\item saveExtractedFragment: Specifies whether the extracted ontology is written to a file or not. 
If set to true, then the OWL file is written to the cache dir. Example usage: \verb|sparql.saveExtractedFragment = true;| + "http://dbpedia.org/resource/Sarah_Silverman"}\end{verbatim} + \item recursionDepth: Maximum distance of an extracted individual from a start individual. This influences the size of the extracted fragment and depends on the maximum property depth you want the learned class expression to have. Example use in conf file: \verb|sparql.recursionDepth = 2|. +\item saveExtractedFragment: Specifies whether the extracted ontology is written to a file or not. If set to true, then the OWL file is written to the cache dir. Example usage: \verb|sparql.saveExtractedFragment = true| \end{itemize} Many further options allow to modify the extracted fragment on the fly or fine-tune the extraction process. The extraction can be started separately by running and modifying \verb|org.dllearner.test.SparqlExtractionTest|. The collected ontology will be saved in the DL-Learner directory. @@ -140,30 +159,34 @@ \subsection{Reasoner Components} -Several reasoner components are implemented, which can be interfaces to concrete reasoner implementations. To select a component in a conf file, use \verb|reasoner=$value;|, where \verb|$value| is one of \verb|digReasoner|, \verb|fastInstanceChecker|, or \verb|owlAPIReasoner|, which are explained below. Note that OWLlink reasoners can be attached via the OWL API interface. +Several reasoner components are implemented, which can be interfaces to concrete reasoner implementations. +%To select a component in a conf file, use \verb|reasoner=$value;|, where \verb|$value| is one of \verb|digReasoner|, \verb|fastInstanceChecker|, or \verb|owlAPIReasoner|, which are explained below. +Note that OWLlink reasoners can be attached via the OWL API interface. \begin{description} \item[OWL API] The OWL API reasoner interface can be used in conjunction with the Pellet, FaCT++, HermiT and OWLlink reasoners. The only option allows to switch between them: \begin{itemize} - \item reasonerType: Selects the desired reasoner. By default, Pellet is used. Usage: \verb|owlAPIReasoner.reasonerType = fact;|. Pellet, FaCT++ and HermiT are already included in DL-Learner. Note that for FaCT++, you need to add -Djava.library.path=lib/fact/64bit (or 32bit) to the Java command. You can also use an external OWLlink reasoner by setting the reasoner type to \verb|owllink|. You can then use the option \verb|owlLinkURL| to specify the URL of the OWLlink reasoner (http://localhost:8080/ by default). + \item reasonerType: Selects the desired reasoner. By default, Pellet is used. Usage: \verb|owlAPIReasoner.reasonerType = fact|. Pellet, FaCT++ and HermiT are already included in DL-Learner. Note that for FaCT++, you need to add -Djava.library.path=lib/fact/64bit (or 32bit) to the Java command. You can also use an external OWLlink reasoner by setting the reasoner type to \verb|owllink|. You can then use the option \verb|owlLinkURL| to specify the URL of the OWLlink reasoner (http://localhost:8080/ by default). \end{itemize} - \item[DIG] DIG 1.1\footnote{\dig} is an interface to description logic reasoners and supported by a large variety of reasoners including Pellet, FaCT++, KAON2, and Racer Pro. The major drawback is that the current version DIG 1.1 is not aligned with the OWL specification and therefore lacks several features, which are crucial to the more recent learning algorithms in DL-Learner. 
If you still want to use the DIG interface, you have to download a DIG capable reasoner and start the DIG server there. DL-Learner communicates with the reasoner using the XML based protocol over HTTP. + \item[DIG] DIG 1.1\footnote{\dig} is an interface to description logic reasoners and supported by a large variety of reasoners including Pellet, FaCT++, KAON2, and Racer Pro. The major drawback is that the current version DIG 1.1 is not aligned with the OWL specification and therefore lacks several features, which are crucial to the more recent learning algorithms in DL-Learner. If you still want to use the DIG interface, you have to download a DIG capable reasoner and start the DIG server there. DL-Learner communicates with the reasoner using the XML based protocol over HTTP. In the latest versions of DL-Learner, DIG support is considered unsupported. You can use it at your own risk, but we will not maintain this part of the code and may remove it in the future. \item[Fast Instance Checker] Instance checks, i.e.~testing whether an individual is instance of a class, is the major reasoner task in many learning algorithms. This reasoner is a self-development of the DL-Learner project. It remedies some problems related to Machine Learning and the Open World Assumption in OWL and therefore is not correct w.r.t.~OWL semantics. (See \cite{cheng00} Section 4 for an explanation.) Furthermore, it provides an improved performance for instance checks by precomputing some inferences and keeping them in memory. The fast instance checker is build on top of Pellet and the default reasoner component in DL-Learner. \end{description} \subsection{Learning Problems} -In the introductory Sections \ref{sec:whatis} and \ref{sec:start}, we described a specific learning problem where positive and negative examples are given. In practice different variations of similar problems occur. You can switch between the different problems using \verb|problem=$value;|, where \verb|$value| is one of \verb|posNegLPStandard|, \verb|posOnlyLP|, \verb|classLearning|. The default is \verb|posNegLPStandard|. +In the introductory Sections \ref{sec:whatis} and \ref{sec:start}, we described a specific learning problem where positive and negative examples are given. In practice different variations of similar problems occur. +%You can switch between the different problems using \verb|problem=$value;|, where \verb|$value| is one of \verb|posNegLPStandard|, \verb|posOnlyLP|, \verb|classLearning|. The default is \verb|posNegLPStandard|. \begin{description} - \item[Positive and Negative Examples] Let the name of the background ontology be $\mathcal{O}$. The goal in this learning problem is to find an OWL class expression $C$ such that all/many positive examples are instances of $C$ w.r.t.~$\mathcal{O}$ and none/few negative examples are instances of $C$ w.r.t.~$\mathcal{O}$. As explained previously, $C$ should be learned such that it generalises to unseen individuals and is readable. The important configuration options of this component are obviously the positive and negative examples, which are often indicated with \verb|+| and \verb|-| signs in conf files as an optional shortcut to using e.g.~\verb|posNegLPStandard.positiveExamples = {...}|. + \item[Positive and Negative Examples] Let the name of the background ontology be $\mathcal{O}$. 
The goal in this learning problem is to find an OWL class expression $C$ such that all/many positive examples are instances of $C$ w.r.t.~$\mathcal{O}$ and none/few negative examples are instances of $C$ w.r.t.~$\mathcal{O}$. As explained previously, $C$ should be learned such that it generalises to unseen individuals and is readable. The important configuration options of this component are obviously the positive and negative examples, which you can specify via, e.g.~\verb|posNegLPStandard.positiveExamples = {...}|. \item[Positive Examples] This learning problem is similar to the one before, but without negative examples. In this case, it is desirable to find a class expression which closely fits the positive examples while still generalising sufficiently well. For instance, you usually do not want to have \verb|owl:Thing| as a solution for this problem, but neither do you want to have an enumeration of all examples. \item[Class Learning] In class learning, you are given an existing class $A$ within your ontology $\mathcal{O}$ and want to describe it. It is similar to the previous problem in that you can use the instances of the class as positive examples. However, there are some differences, e.g.~you do not want to have $A$ itself as a proposed solution of the problem, and since this is an ontology engineering task, the focus on short and readable class expressions is stronger than for the two problems mentioned before. The learner can also take advantage of existing knowledge about the class to describe. \end{description} \subsection{Learning Algorithms} -The implemented algorithms vary from very simple (and usually inappropriate) algorithms to sophisticated ones. You can switch between the different algorithms using \verb|algorithm=$value;|, where \verb|$value| is one of \verb|bruteForce|, \verb|random|, \verb|gp|, \verb|refinement|, \verb|refexamples|, \verb|celoe|, \verb|el| and \verb|disjunctiveEL|. The default is \verb|refexamples|. +The implemented algorithms vary from very simple (and usually inappropriate) algorithms to sophisticated ones. +%You can switch between the different algorithms using \verb|algorithm=$value;|, where \verb|$value| is one of \verb|bruteForce|, \verb|random|, \verb|gp|, \verb|refinement|, \verb|refexamples|, \verb|celoe|, \verb|el| and \verb|disjunctiveEL|. The default is \verb|refexamples|. \begin{description} \item[Brute Force]: This algorithm tests all class expressions up to a specified length, which you can set using e.g.~\verb|bruteForce.maxlength = 7|. @@ -173,10 +196,10 @@ \item number of individuals: The individual count is the size of each generation in a GP algorithm. It is one of the most crucial parameters. Setting it to a higher value usually means investing more computational resource for increasing the likelihood that a solution will be found. Usage: \verb|gp.numberOfIndividuals = 100|. \item refinement probability: This is used to specify how likely the usage of the genetic refinement operator should be, e.g.~\verb|gp.refinementProbability = 0.6| means that it will be selected 60\% of the time. \end{itemize} - The GP algorithm has 15 more options documented in \verb|doc/configOptions.txt|. + The GP algorithm has 15 more options documented in \verb|doc/configOptions.html|. \item[Refinement] This is a top down refinement operator approach, which is described in \cite{alc_learning_algorithm} and based on insights in \cite{property_analysis}. 
Some options include: \begin{itemize} - \item target language: The standard target language of this algorithm is $\mathcal{ALCN(D)}$. However, you can change the language, i.e.~you can exclude the $\forall$ constructor by using \verb|refinement.useAllConstructor = false;|. Similar options exist for $\exists$, $\neg$, cardinality restrictions, and boolean datatypes. + \item target language: The standard target language of this algorithm is $\mathcal{ALCN(D)}$. However, you can change the language, i.e.~you can exclude the $\forall$ constructor by using \verb|refinement.useAllConstructor = false|. Similar options exist for $\exists$, $\neg$, cardinality restrictions, and boolean datatypes. \item maximum execution time: If there is no perfect solution of a given problem, the algorithm can potentially run forever (in practice it will run out of memory). It is therefore often interesting to limit the execution time. You can use e.g.~\verb|refinement.maxExecutionTimeInSeconds = 100| to say that the algorithm should run for at most 100 seconds. Often, it will run slightly longer than the maximum execution time since it waits for the next internal loop of the algorithm to stop gracefully. \end{itemize} The algorithm supports a range of further options. For instance, one can specify which classes and properties must not occur in resulting class expressions. @@ -201,23 +224,27 @@ \section{DL-Learner Interfaces} -\subsection{Command Line Interface} +% \subsection{Command Line Interface} -\todo{Description of Conf File Syntax; special cases: empty set of beans is denoted by ``-''; the name of a bean must not be ``true'' or ``false'' or start with a number; config options must not have the name ``type''; these conentions are introduced in order to be a able to provide a very compact syntax} +%\todo{Description of Conf File Syntax; special cases: empty set of beans is denoted by ``-''; the name of a bean must not be ``true'' or ``false'' or start with a number; config options must not have the name ``type''; these conentions are introduced in order to be a able to provide a very compact syntax} -One interface you have already used in Section \ref{sec:start} is the command line. There are two executables, which can be used for starting DL-Learner on the commandline: \verb|dl-learner| and \verb|quickstart|. The first one takes a conf file as argument, whereas the latter one lists all conf files in the examples folder and allows you to select one of those. +One interface you have already used in Section \ref{sec:start} is the command line. There are two executables, which can be used for starting DL-Learner on the commandline: \verb|dl-learner| and \verb|quickstart|. The first one takes a conf file as argument, whereas the latter one lists all conf files in the examples folder and allows you to select one of those. There are a lot of conf files available in the \verb|/examples| directory, which you can use a base for your own experiments. -\begin{figure} - \centering - \includegraphics[width=.8\textwidth]{screenshots/gui_algorithm} - \caption{GUI screenshot showing the learning algorithm tab. The UI allows you to set different options and then proceed to the next tab and execute the algorithm.} - \label{fig:gui} -\end{figure} +%\begin{figure} +% \centering +% \includegraphics[width=.8\textwidth]{screenshots/gui_algorithm} +% \caption{GUI screenshot showing the learning algorithm tab. 
The UI allows you to set different options and then proceed to the next tab and execute the algorithm.} +% \label{fig:gui} +%\end{figure} -Apart from the command line, there is also a prototypical graphical interface. You can use \verb|gui| (or \verb|gui.bat|) to start it. Optionally, a conf file can be passed as argument. The main GUI window has four tabs corresponding to the four different types of components and a run tab to execute the learning algorithm. Using the GUI, you can assemble the desired combination of components and options. The \verb|File| menu allows you to load a conf file or save the current configuration to a conf file. The GUI implementation is currently prototypical, so please report any bugs or feature requests you have (see Section \ref{sec:contact}). Since the GUI uses the component manager, it will automatically evolve when new components and options are added. +DL-Learner had a graphical user interface and a web service. In 2011, DL-Learner was generalised to be able to solve general learning problems and arbitrarily combine components, which was necessary for learning SPARQL queries and fuzzy OWL class expressions. The commandline interface has been adapted for this purpose, but the graphical user interface and web service are currently under construction and will be re-introduced at a later stage. -A third interface through which DL-Learner can be accessed programmatically is a web service. You can execute \verb|ws| (or \verb|ws.bat|) to start the web service. It is based on the Java API for XML Web Services (JAX-WS), which is included in Java 6 or higher. Executing the command will start a web server on port 8181 of your local machine. The WSDL can be accessed via \url{http://localhost:8181/services?wsdl}. You can use a WSDL viewer to see the supported operations or view the JavaDoc of the corresponding Java file\footnote{viewable online at \wsjavadoc}. Some examples for calling the web service from PHP can be found in the DL-Learner subversion repository\footnote{in the directory src/php-examples/:\\ \wsphpexamples}. +% Web Service and GUI "under construction" at the moment +%Apart from the command line, there is also a prototypical graphical interface. You can use \verb|gui| (or \verb|gui.bat|) to start it. Optionally, a conf file can be passed as argument. The main GUI window has four tabs corresponding to the four different types of components and a run tab to execute the learning algorithm. Using the GUI, you can assemble the desired combination of components and options. The \verb|File| menu allows you to load a conf file or save the current configuration to a conf file. The GUI implementation is currently prototypical, so please report any bugs or feature requests you have (see Section \ref{sec:contact}). Since the GUI uses the component manager, it will automatically evolve when new components and options are added. + +%A third interface through which DL-Learner can be accessed programmatically is a web service. You can execute \verb|ws| (or \verb|ws.bat|) to start the web service. It is based on the Java API for XML Web Services (JAX-WS), which is included in Java 6 or higher. Executing the command will start a web server on port 8181 of your local machine. The WSDL can be accessed via \url{http://localhost:8181/services?wsdl}. You can use a WSDL viewer to see the supported operations or view the JavaDoc of the corresponding Java file\footnote{viewable online at \wsjavadoc}. 
Some examples for calling the web service from PHP can be found in the DL-Learner subversion repository\footnote{in the directory src/php-examples/:\\ \wsphpexamples}. + Another means to access DL-Learner, in particular for ontology engineering, is to use the OntoWiki and Protégé plugins. The OntoWiki plugin is not officially released yet, but can be used in the SVN version of OntoWiki. The Protégé 4 plugin can be installed either by downloading it from the DL-Learner download page or directly within Protégé 4 by clicking on ``File'', ``Preferences'', ``Plugins'', ``Check for Downloads'' now and selecting the DL-Learner plugin. For more information and a screencast see the Protégé plugin wiki page \footnote{\wikiprotplugin}. \section{Extending DL-Learner} @@ -225,7 +252,7 @@ DL-Learner is open source and component based. If you want to develop a specific part or extension of a class expression learning algorithm for OWL, then you are invited to use DL-Learner as a base. This allows you to focus on the part you want to implement while being able to use DL-Learner as a library and access it through one of the interfaces. -If you want to create a new component, then you first have to decide on the type of your component. To implement a concrete component, you have to subclass one of the following classes and implement their abstract methods: +If you want to create a new component, then you first have to decide on the type of your component. To implement a concrete component, you can implement one of the following interfaces (list is incomplete): \begin{itemize} \item org.dllearner.core.KnowledgeSource @@ -234,9 +261,94 @@ \item org.dllearner.core.LearningAlgorithm \end{itemize} -You then have to add your component to \verb|lib/components.ini| such that it is registered in the component manager when DL-Learner starts up. If you want to use configuration options in your component, you need to create a static method as follows: +%You then have to add your component to \verb|lib/components.ini| such that it is registered in the component manager when DL-Learner starts up. If you want to use configuration options in your component, you need to create a static method as follows: +That is sufficient for using your component programmatically in combination with existing DL-Learner components. +If your class name is \verb|org.example.TestAlgorithm|, then you can instantiate your class in a conf file via: +\begin{verbatim} +c.type = "org.example.TestAlgorithm" +\end{verbatim} +As you have probably seen by now in various conf files, DL-Learner allows to configure components. This is done via standard Java Beans. If you want to create a conf option \verb|testOption|, you just need to create a variable with getters and setters in your code: + \begin{verbatim} +public class TestAlgorithm implements LearningAlgorithm { + + private double testOption = 0.0; + + [...] + + public double getTestOption() { + return testOption; + } + + public void setTestOption(double testOption) { + this.startNodeBonus = startNodeBonus; + } + +} +\end{verbatim} + +That would be sufficient to include your components in conf files: +\begin{verbatim} +c.type = "org.example.TestAlgorithm" +c.testOption = 0.3 +\end{verbatim} + +In your code, you should have an empty default constructor and an \verb|init()| method (as required by the Component interface). The default constructor will be called first, followed by setter methods and then the \verb|init()| method. This is a standard Java Beans approach. 
In summary, you need to do the following:
+\begin{itemize}
+  \item implement an appropriate DL-Learner interface for what you want to do
+  \item add variables for all config options as well as getters and setters for them
+  \item if you implement a constructor, please also add a default constructor with an empty set of arguments
+\end{itemize}
+
+By only requiring those few steps, we want to make adding further components to DL-Learner as lightweight as possible.
+
+If you are familiar with the Spring framework\footnote{\url{http://www.springsource.org}}, then it is helpful to know that conf files are just an abbreviated syntax for Spring XML configurations. You can use all powerful features of Spring in your code, which we do not describe in full detail here. In fact, you can convert conf files to Spring configuration files by adding these lines:
+
+\begin{verbatim}
+cli.type = "org.dllearner.cli.CLI"
+cli.writeSpringConfiguration = true
+\end{verbatim}
+
+If you add this to \verb|test.conf|, then a \verb|file.xml| is generated, which is the Spring equivalent of the conf file (a rough sketch of such a generated file is shown at the end of this section).
+
+If you are a DL-Learner developer and want to properly document your component, you should take a few further steps:
+\begin{itemize}
+  \item add your class to \verb|AnnComponentManager.componentClassNames|
+  \item add an annotation for your class
+  \item add annotations for all configuration options
+\end{itemize}
+
+An example of an annotated class could look as follows:
+
+\begin{verbatim}
+@ComponentAnn(name="Test Algorithm", shortName="ta", version=0.1,
+    description="My first experiment.")
+public class TestAlgorithm implements LearningAlgorithm { ...
+
+  @ConfigOption(name="testOption", defaultValue="0.0",
+      description="This option controls xyz.")
+  private double testOption = 0.0;
+
+  [...]
+
+  public double getTestOption() {
+    return testOption;
+  }
+
+  public void setTestOption(double testOption) {
+    this.testOption = testOption;
+  }
+
+}
+\end{verbatim}
+
+The \verb|@ComponentAnn| annotation marks classes as DL-Learner components. Similarly, the \verb|@ConfigOption| annotation marks variables as configuration options. These should be the variables which you want the user to be able to configure and experiment with. A benefit of adding the extra metadata provided by the annotations is that the component will appear in documentation pages such as \url{http://dl-learner.svn.sourceforge.net/viewvc/dl-learner/trunk/interfaces/doc/configOptions.html}. In general, they provide users of your component with useful information.
+
+\begin{comment}
+You can then add your class to
+
+\begin{verbatim}
public static Collection<ConfigOption<?>> createConfigOptions() {
  List<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>();
  options.add(new IntegerConfigOption("maxDepth",
@@ -260,6 +372,7 @@
\end{itemize}

Restricting ourselves to these option types gives us the possibility to build very flexible user interfaces. Whenever a new component or a new configuration option for a component is added, the current user interfaces (GUI, web service, commandline) will automatically support it without any or only minimal code changes.
+\end{comment}

This quick introduction only serves as an entry point to get you started. For more detailed questions about how to extend DL-Learner, please drop us a message in the DL-Learner mailing list.
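Returning to the Spring export mentioned above: the file written by \verb|writeSpringConfiguration| is an ordinary Spring bean definition file. As a rough sketch (the exact header and file name produced may differ), the two-line conf file

\begin{verbatim}
c.type = "org.example.TestAlgorithm"
c.testOption = 0.3
\end{verbatim}

corresponds to a Spring XML configuration along these lines:

\begin{verbatim}
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans
           http://www.springframework.org/schema/beans/spring-beans.xsd">

  <!-- bean id corresponds to the conf file prefix "c",
       property names correspond to the conf option names -->
  <bean id="c" class="org.example.TestAlgorithm">
    <property name="testOption" value="0.3"/>
  </bean>

</beans>
\end{verbatim}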
@@ -276,6 +389,7 @@ \item Latest Release: \url{http://sourceforge.net/project/showfiles.php?group_id=203619} \end{itemize} +\nocite{*} \bibliographystyle{apalike} \bibliography{bibliography} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-01 14:21:58
|
Revision: 3461 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3461&view=rev Author: jenslehmann Date: 2011-12-01 14:21:48 +0000 (Thu, 01 Dec 2011) Log Message: ----------- fixed bug in CELOE Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/examples/nlp2rdf/sample/sample2.conf Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-12-01 13:21:18 UTC (rev 3460) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-12-01 14:21:48 UTC (rev 3461) @@ -244,7 +244,7 @@ } // singleSuggestionMode = configurator.getSingleSuggestionMode(); - + /* // create refinement operator if(operator == null) { operator = new RhoDRDown(); @@ -255,6 +255,22 @@ ((RhoDRDown)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); ((RhoDRDown)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); ((RhoDRDown)operator).init(); + */ + // create a refinement operator and pass all configuration + // variables to it + if(operator == null) { + // we use a default operator and inject the class hierarchy for now + operator = new RhoDRDown(); + ((RhoDRDown)operator).setStartClass(startClass); + ((RhoDRDown)operator).setReasoner(reasoner); + ((RhoDRDown)operator).init(); + } + // TODO: find a better solution as this is quite difficult to debug + ((RhoDRDown)operator).setSubHierarchy(classHierarchy); + ((RhoDRDown)operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy()); + ((RhoDRDown)operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy()); + + // operator = new RhoDRDown(reasoner, classHierarchy, startClass, configurator); baseURI = reasoner.getBaseURI(); prefixes = reasoner.getPrefixes(); Modified: trunk/examples/nlp2rdf/sample/sample2.conf =================================================================== --- trunk/examples/nlp2rdf/sample/sample2.conf 2011-12-01 13:21:18 UTC (rev 3460) +++ trunk/examples/nlp2rdf/sample/sample2.conf 2011-12-01 14:21:48 UTC (rev 3461) @@ -30,6 +30,7 @@ lp.negativeExamples = {"nif:offset_81_107_The+eaten+apple+was+", "nif:offset_108_132_The+apple+ate+the+ap", "nif:offset_133_160_The+boy+will+eat+the"} // create a refinement operator and configure it + op.type = "rho" op.useNegation = false op.useAllConstructor = false This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
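Condensed for readability, the operator initialisation that this revision introduces in CELOE (and that the added op.type = "rho" line in sample2.conf works together with) is essentially the following; all names are taken from the diff above:

    // create a default refinement operator if none was configured
    if (operator == null) {
        operator = new RhoDRDown();
        ((RhoDRDown) operator).setStartClass(startClass);
        ((RhoDRDown) operator).setReasoner(reasoner);
        ((RhoDRDown) operator).init();
    }
    // the hierarchies are injected in any case, also for operators configured externally
    ((RhoDRDown) operator).setSubHierarchy(classHierarchy);
    ((RhoDRDown) operator).setObjectPropertyHierarchy(reasoner.getObjectPropertyHierarchy());
    ((RhoDRDown) operator).setDataPropertyHierarchy(reasoner.getDatatypePropertyHierarchy());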
From: <lor...@us...> - 2011-12-05 08:59:38
|
Revision: 3469 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3469&view=rev Author: lorenz_b Date: 2011-12-05 08:59:27 +0000 (Mon, 05 Dec 2011) Log Message: ----------- Moved QTL example to test. Added Paths: ----------- trunk/test/qtl/ trunk/test/qtl/dbpedia_simple.conf Removed Paths: ------------- trunk/examples/qtl/ Added: trunk/test/qtl/dbpedia_simple.conf =================================================================== --- trunk/test/qtl/dbpedia_simple.conf (rev 0) +++ trunk/test/qtl/dbpedia_simple.conf 2011-12-05 08:59:27 UTC (rev 3469) @@ -0,0 +1,12 @@ + +prefixes = [ ("dbr","http://dbpedia.org/resource/") ] + +ks.type = "SPARQL" +ks.url = "http://live.dbpedia.org/sparql" +ks.defaultGraphURIs = { "http://dbpedia.org" } + +// learning problem +lp.type = "posonlylp" +lp.positiveExamples = { "dbr:Bob_Dylan", "dbr:The_Beatles", "dbr:Aerosmith" } + +alg.type = "qtl" This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
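As a side note, a conf file like this can also be loaded programmatically through the CLI component used elsewhere in these commits. A minimal sketch, assuming the usual init/run entry points of the CLI class (only the constructor and init() are shown in the test code further below; the run method name is an assumption):

    import java.io.File;
    import org.dllearner.cli.CLI;

    public class RunQTLExample {
        public static void main(String[] args) throws Exception {
            // conf file defining the SPARQL knowledge source, the positive-only
            // learning problem and the QTL algorithm
            File conf = new File("test/qtl/dbpedia_simple.conf");
            CLI cli = new CLI(conf);
            cli.init(); // builds the configuration and wires the components
            cli.run();  // assumed method that starts the configured algorithm
        }
    }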
From: <jen...@us...> - 2011-12-07 15:08:10
|
Revision: 3481 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3481&view=rev Author: jenslehmann Date: 2011-12-07 15:07:59 +0000 (Wed, 07 Dec 2011) Log Message: ----------- commented out one unit test and moved some examples to test directory Modified Paths: -------------- trunk/components-core/src/test/java/org/dllearner/test/junit/ConfigOptionTest.java Added Paths: ----------- trunk/test/nlp2rdf/reuters_gold_vs_copper/ trunk/test/nlp2rdf/sample/ Removed Paths: ------------- trunk/examples/nlp2rdf/reuters_gold_vs_copper/ trunk/examples/nlp2rdf/sample/ Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/ConfigOptionTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/ConfigOptionTest.java 2011-12-06 17:08:59 UTC (rev 3480) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/ConfigOptionTest.java 2011-12-07 15:07:59 UTC (rev 3481) @@ -38,21 +38,23 @@ @Test public void testConfigOption() { - List<ConfigOption> configOptions = ConfigHelper.getConfigOptions(CELOE.class); - assertFalse(configOptions.isEmpty()); + // now outdated, because property editors do not need to be specified for each option - CELOE celoe = new CELOE(); - celoe.setMaxExecutionTimeInSeconds(10); - Map<ConfigOption,Object> optionValues = ConfigHelper.getConfigOptionValues(celoe); - boolean found = false; - for(Entry<ConfigOption,Object> entry : optionValues.entrySet()) { - System.out.println(entry.getKey() + " " + entry.getValue()); - if(entry.getKey().name().equals("maxExecutionTimeInSeconds")) { - found = true; - assertTrue(Integer.valueOf(entry.getValue().toString())==10); - } - } - assertTrue(found); +// List<ConfigOption> configOptions = ConfigHelper.getConfigOptions(CELOE.class); +// assertFalse(configOptions.isEmpty()); +// +// CELOE celoe = new CELOE(); +// celoe.setMaxExecutionTimeInSeconds(10); +// Map<ConfigOption,Object> optionValues = ConfigHelper.getConfigOptionValues(celoe); +// boolean found = false; +// for(Entry<ConfigOption,Object> entry : optionValues.entrySet()) { +// System.out.println(entry.getKey() + " " + entry.getValue()); +// if(entry.getKey().name().equals("maxExecutionTimeInSeconds")) { +// found = true; +// assertTrue(Integer.valueOf(entry.getValue().toString())==10); +// } +// } +// assertTrue(found); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-07 15:18:34
|
Revision: 3482 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3482&view=rev Author: jenslehmann Date: 2011-12-07 15:18:27 +0000 (Wed, 07 Dec 2011) Log Message: ----------- removed ext dependency for release Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java trunk/interfaces/pom.xml trunk/interfaces/src/test/java/org/dllearner/cli/NLP2RDFCLITest.java Removed Paths: ------------- trunk/examples/nlp2rdf/ Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2011-12-07 15:07:59 UTC (rev 3481) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2011-12-07 15:18:27 UTC (rev 3482) @@ -71,7 +71,7 @@ "org.dllearner.algorithms.properties.SubDataPropertyOfAxiomLearner", "org.dllearner.algorithms.DisjointClassesLearner", "org.dllearner.algorithms.SimpleSubclassLearner", - "org.dllearner.algorithm.qtl.QTL", +// "org.dllearner.algorithm.qtl.QTL", "org.dllearner.kb.KBFile", "org.dllearner.kb.OWLFile", "org.dllearner.kb.SparqlEndpointKS", Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2011-12-07 15:07:59 UTC (rev 3481) +++ trunk/interfaces/pom.xml 2011-12-07 15:18:27 UTC (rev 3482) @@ -202,10 +202,12 @@ <groupId>org.dllearner</groupId> <artifactId>components-core</artifactId> </dependency> + <!-- <dependency> <groupId>org.dllearner</groupId> <artifactId>components-ext</artifactId> </dependency> + --> <!-- Added the dependency of the core tests so that they will be accessible from the tests in this component --> <dependency> Modified: trunk/interfaces/src/test/java/org/dllearner/cli/NLP2RDFCLITest.java =================================================================== --- trunk/interfaces/src/test/java/org/dllearner/cli/NLP2RDFCLITest.java 2011-12-07 15:07:59 UTC (rev 3481) +++ trunk/interfaces/src/test/java/org/dllearner/cli/NLP2RDFCLITest.java 2011-12-07 15:18:27 UTC (rev 3482) @@ -14,7 +14,7 @@ @Test public void sampleTest() throws IOException { - File f = new File("../examples/nlp2rdf/sample/sample1.conf"); + File f = new File("../test/nlp2rdf/sample/sample1.conf"); // File f = new File("../examples/nlp2rdf/learning_initial_6/dbpedia_spotlight_plus/copper17_vs_gold35.conf"); CLI cli = new CLI(f); cli.init(); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2011-12-07 15:24:24
|
Revision: 3483 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3483&view=rev Author: lorenz_b Date: 2011-12-07 15:24:10 +0000 (Wed, 07 Dec 2011) Log Message: ----------- Some changes for release. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java trunk/interfaces/pom.xml trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/SimpleSubclassLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -226,7 +226,7 @@ l.setReasoner(reasoner); ConfigHelper.configure(l, "maxExecutionTimeInSeconds", 10); - l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/AdministrativeRegion")); + l.setClassToDescribe(new NamedClass("http://dbpedia.org/ontology/Olympics")); l.init(); l.start(); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/EquivalentObjectPropertyAxiomLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -19,7 +19,9 @@ package org.dllearner.algorithms.properties; +import java.net.URL; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -36,6 +38,7 @@ import org.dllearner.core.owl.EquivalentObjectPropertiesAxiom; import org.dllearner.core.owl.ObjectProperty; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.AxiomScore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -132,5 +135,15 @@ return axioms; } + public static void main(String[] args) throws Exception{ + EquivalentObjectPropertyAxiomLearner l = new EquivalentObjectPropertyAxiomLearner(new SparqlEndpointKS(new SparqlEndpoint( + new URL("http://dbpedia.aksw.org:8902/sparql"), Collections.singletonList("http://dbpedia.org"), Collections.<String>emptyList())));//.getEndpointDBpediaLiveAKSW())); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/country")); + l.setMaxExecutionTimeInSeconds(10); + l.init(); + l.start(); + 
System.out.println(l.getCurrentlyBestEvaluatedAxioms(5, 0.75)); + } + } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/IrreflexiveObjectPropertyAxiomLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -19,6 +19,8 @@ package org.dllearner.algorithms.properties; +import java.net.MalformedURLException; +import java.net.URL; import java.util.ArrayList; import org.dllearner.core.AbstractAxiomLearningAlgorithm; @@ -101,10 +103,14 @@ logger.info("...finished in {}ms.", (System.currentTimeMillis()-startTime)); } - public static void main(String[] args) { - IrreflexiveObjectPropertyAxiomLearner l = new IrreflexiveObjectPropertyAxiomLearner(new SparqlEndpointKS(SparqlEndpoint.getEndpointDBpediaLiveAKSW())); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/thumbnail")); + public static void main(String[] args) throws Exception { + SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql"))); + + IrreflexiveObjectPropertyAxiomLearner l = new IrreflexiveObjectPropertyAxiomLearner(ks); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/author")); l.start(); + + System.out.println(l.getCurrentlyBestEvaluatedAxioms(10, 0.75)); } } Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/properties/ObjectPropertyDomainAxiomLearner.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -176,21 +176,21 @@ } public static void main(String[] args) throws Exception{ - SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://factforge.net/sparql")));//.getEndpointDBpediaLiveAKSW())); + SparqlEndpointKS ks = new SparqlEndpointKS(new SparqlEndpoint(new URL("http://dbpedia.aksw.org:8902/sparql")));//.getEndpointDBpediaLiveAKSW())); SPARQLReasoner reasoner = new SPARQLReasoner(ks); -// reasoner.prepareSubsumptionHierarchy(); + reasoner.prepareSubsumptionHierarchy(); ObjectPropertyDomainAxiomLearner l = new ObjectPropertyDomainAxiomLearner(ks); l.setReasoner(reasoner); - l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/industry")); + l.setPropertyToDescribe(new ObjectProperty("http://dbpedia.org/ontology/officialLanguage")); l.setMaxExecutionTimeInSeconds(10); - l.setReturnOnlyNewAxioms(true); +// l.setReturnOnlyNewAxioms(true); l.init(); l.start(); - System.out.println(l.getCurrentlyBestEvaluatedAxioms(5)); + System.out.println(l.getCurrentlyBestEvaluatedAxioms(10, 0.75)); } } Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2011-12-07 15:24:10 
UTC (rev 3483) @@ -28,6 +28,7 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.aksw.commons.jena.CollectionResultSet; import org.dllearner.core.config.BooleanEditor; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerEditor; @@ -190,6 +191,13 @@ queryExecution.setDefaultGraphURIs(ks.getEndpoint().getDefaultGraphURIs()); queryExecution.setNamedGraphURIs(ks.getEndpoint().getNamedGraphURIs()); +// ResultSet resultSet = null; +// try { +// resultSet = queryExecution.execSelect(); +// } catch (Exception e) { +// logger.error("Got a timeout during query execution.", e); +// resultSet = new CollectionResultSet(Collections.<String>emptyList(), Collections.<QuerySolution>emptyList()); +// } ResultSet resultSet = queryExecution.execSelect(); return resultSet; Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ConciseBoundedDescriptionGeneratorImpl.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -80,15 +80,19 @@ StringBuilder sb = new StringBuilder(); sb.append("CONSTRUCT {\n"); sb.append("<").append(resource).append("> ").append("?p0 ").append("?o0").append(".\n"); + sb.append("?p0 a ?type0.\n"); for(int i = 1; i < depth; i++){ sb.append("?o").append(i-1).append(" ").append("?p").append(i).append(" ").append("?o").append(i).append(".\n"); + sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); } sb.append("}\n"); sb.append("WHERE {\n"); sb.append("<").append(resource).append("> ").append("?p0 ").append("?o0").append(".\n"); + sb.append("?p0 a ?type0.\n"); for(int i = 1; i < depth; i++){ sb.append("OPTIONAL{\n"); sb.append("?o").append(i-1).append(" ").append("?p").append(i).append(" ").append("?o").append(i).append(".\n"); + sb.append("?p").append(i).append(" ").append("a").append(" ").append("?type").append(i).append(".\n"); } for(int i = 1; i < depth; i++){ sb.append("}"); @@ -96,7 +100,6 @@ sb.append("}\n"); sb.append("LIMIT ").append(limit).append("\n"); sb.append("OFFSET ").append(offset); - return sb.toString(); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -514,7 +514,11 @@ // policy: returned sets are clones, i.e. 
can be modified // (of course we only have to clone the leafs of a class description tree) if (description instanceof NamedClass) { - return (TreeSet<Individual>) classInstancesPos.get((NamedClass) description).clone(); + if(classInstancesPos.containsKey((NamedClass) description)){ + return (TreeSet<Individual>) classInstancesPos.get((NamedClass) description).clone(); + } else { + return new TreeSet<Individual>(); + } } else if (description instanceof Negation) { if(description.getChild(0) instanceof NamedClass) { return (TreeSet<Individual>) classInstancesNeg.get((NamedClass) description.getChild(0)).clone(); Modified: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -351,7 +351,7 @@ // we do not need the temporary set anymore and let the // garbage collector take care of it valueFrequency = null; - dataValueFrequency = null; + dataValueFrequency.clear();// = null; // System.out.println("freqDataValues: " + frequentDataValues); Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/pom.xml 2011-12-07 15:24:10 UTC (rev 3483) @@ -15,7 +15,7 @@ </parent> <properties> - <release.name>1.0-alpha-2</release.name> + <release.name>1.0-beta-1</release.name> </properties> <profiles> @@ -202,12 +202,10 @@ <groupId>org.dllearner</groupId> <artifactId>components-core</artifactId> </dependency> - <!-- - <dependency> + <!--dependency> <groupId>org.dllearner</groupId> <artifactId>components-ext</artifactId> - </dependency> - --> + </dependency--> <!-- Added the dependency of the core tests so that they will be accessible from the tests in this component --> <dependency> @@ -279,6 +277,12 @@ <groupId>org.json</groupId> <artifactId>json</artifactId> </dependency> + + <dependency> + <groupId>commons-lang</groupId> + <artifactId>commons-lang</artifactId> + <version>2.3</version> +</dependency> <!--BEGIN Logging Dependencies--> Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -123,7 +123,7 @@ AbstractCELA la = context.getBean(AbstractCELA.class); new CrossValidation(la,lp,rs,nrOfFolds,false); } else { - knowledgeSource = context.getBean(KnowledgeSource.class); + knowledgeSource = context.getBeansOfType(KnowledgeSource.class).entrySet().iterator().next().getValue(); algorithm = context.getBean(LearningAlgorithm.class); algorithm.start(); } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -128,6 +128,7 @@ import org.semanticweb.owlapi.io.SystemOutDocumentTarget; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; 
+import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLIndividual; @@ -223,6 +224,7 @@ AbstractReasonerComponent rcCached; private Set<OWLAxiom> learnedOWLAxioms; + private Set<EvaluatedAxiom> learnedEvaluatedAxioms; public Enrichment(SparqlEndpoint se, Entity resource, double threshold, int nrOfAxiomsToLearn, boolean useInference, boolean verbose) { this.se = se; @@ -263,6 +265,7 @@ algorithmRuns = new LinkedList<AlgorithmRun>(); learnedOWLAxioms = new HashSet<OWLAxiom>(); + learnedEvaluatedAxioms = new HashSet<EvaluatedAxiom>(); } public void start() throws ComponentInitException, IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException, LearningProblemUnsupportedException, MalformedURLException { @@ -444,7 +447,7 @@ learnedAxioms.add(new EvaluatedAxiom(axiom, score)); } System.out.println(prettyPrint(learnedAxioms)); - + learnedEvaluatedAxioms.addAll(learnedAxioms); algorithmRuns.add(new AlgorithmRun(CELOE.class, learnedAxioms, ConfigHelper.getConfigOptionValues(la))); return learnedAxioms; } @@ -485,7 +488,7 @@ List<EvaluatedAxiom> learnedAxioms = learner .getCurrentlyBestEvaluatedAxioms(nrOfAxiomsToLearn, threshold); System.out.println(prettyPrint(learnedAxioms)); - + learnedEvaluatedAxioms.addAll(learnedAxioms); for(EvaluatedAxiom evAx : learnedAxioms){ learnedOWLAxioms.add(OWLAPIAxiomConvertVisitor.convertAxiom(evAx.getAxiom())); } @@ -665,7 +668,7 @@ return model; } - private OWLOntology getGeneratedOntology(){ + public OWLOntology getGeneratedOntology(){ OWLOntology ontology = null; try { OWLOntologyManager man = OWLManager.createOWLOntologyManager(); @@ -677,6 +680,31 @@ return ontology; } + public OWLOntology getGeneratedOntology(boolean withConfidenceAsAnnotations){ + OWLOntology ontology = null; + try { + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = man.getOWLDataFactory(); + if(withConfidenceAsAnnotations){ + OWLAnnotationProperty confAnnoProp = factory.getOWLAnnotationProperty(IRI.create(EnrichmentVocabulary.NS + "confidence")); + Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); + for(EvaluatedAxiom evAx : learnedEvaluatedAxioms){ + OWLAxiom ax = OWLAPIAxiomConvertVisitor.convertAxiom(evAx.getAxiom()); + ax = ax.getAnnotatedAxiom(Collections.singleton( + factory.getOWLAnnotation(confAnnoProp, factory.getOWLLiteral(evAx.getScore().getAccuracy())))); + axioms.add(ax); + } + ontology = man.createOntology(axioms); + } else { + ontology = man.createOntology(learnedOWLAxioms); + } + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return ontology; + } + /* * Write axioms in Turtle syntax. */ @@ -748,6 +776,8 @@ "Specifies whether to use inference. 
If yes, the schema will be loaded into a reasoner and used for computing the scores.").withOptionalArg().ofType(Boolean.class).defaultsTo(true); parser.acceptsAll(asList("s", "serialize"), "Specify a file where the ontology with all axioms can be written.") .withRequiredArg().ofType(File.class); + parser.acceptsAll(asList("a", "annotations"), + "Specifies whether to save scores as annotations.").withOptionalArg().ofType(Boolean.class).defaultsTo(true); // parse options and display a message for the user in case of problems OptionSet options = null; try { @@ -885,7 +915,7 @@ if(options.has("s")){ File file = (File)options.valueOf("s"); try { - OWLOntology ontology = e.getGeneratedOntology(); + OWLOntology ontology = e.getGeneratedOntology(options.has("a")); OutputStream os = new BufferedOutputStream(new FileOutputStream(file)); OWLManager.createOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), os); } catch (OWLOntologyStorageException e1) { Modified: trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java 2011-12-07 15:18:27 UTC (rev 3482) +++ trunk/interfaces/src/main/java/org/dllearner/cli/QuickStart.java 2011-12-07 15:24:10 UTC (rev 3483) @@ -177,7 +177,7 @@ public static void getAllConfs(File f, String path, Map<String, ArrayList<String>> confs) { path = path + File.separator; // System.out.println(path); - String[] act = f.list(); + String[] act = f.list();System.out.println(f); for (int i = 0; i < act.length; i++) { // System.out.println(act[i]); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2011-12-09 13:52:52
|
Revision: 3495 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3495&view=rev Author: jenslehmann Date: 2011-12-09 13:52:45 +0000 (Fri, 09 Dec 2011) Log Message: ----------- fixes for enrichment script Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java trunk/components-core/src/main/java/org/dllearner/utilities/Files.java trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/Start.java trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java trunk/scripts/src/main/java/org/dllearner/examples/Heart.java trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -489,7 +489,7 @@ if (replaceSearchTree) Files.createFile(new File(searchTreeFile), treeString); else - Files.appendFile(new File(searchTreeFile), treeString); + Files.appendToFile(new File(searchTreeFile), treeString); } // System.out.println(loop); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/fuzzydll/FuzzyCELOE.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -507,7 +507,7 @@ if (replaceSearchTree) Files.createFile(new File(searchTreeFile), treeString); else - Files.appendFile(new File(searchTreeFile), treeString); + Files.appendToFile(new File(searchTreeFile), treeString); } // System.out.println(loop); Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java 
=================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/ocel/ROLearner2.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -475,7 +475,7 @@ if (replaceSearchTree) Files.createFile(searchTreeFile, treeString); else - Files.appendFile(searchTreeFile, treeString); + Files.appendToFile(searchTreeFile, treeString); } // Anzahl Schleifendurchläufe Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/refinement/ROLearner.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -521,7 +521,7 @@ if(replaceSearchTree) Files.createFile(searchTreeFile, treeString); else - Files.appendFile(searchTreeFile, treeString); + Files.appendToFile(searchTreeFile, treeString); }//write search tree // Anzahl Schleifendurchläufe Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -357,7 +357,7 @@ File jamonlog = new File("log/jamon.html"); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n" + Files.appendToFile(jamonlog, "<xmp>\n" + JamonMonitorLogger.getStringForAllSortedByLabel()); System.exit(0); } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlQuery.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -245,7 +245,7 @@ Files.createFile(f, s + "\n"); logDeletedOnStart = true; } else { - Files.appendFile(f, s + "\n"); + Files.appendToFile(f, s + "\n"); } } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/DIGHTTPConnector.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -176,7 +176,7 @@ osw.close(); if(protocolFile != null) - Files.appendFile(protocolFile, "DIG code send to reasoner:\n\n"+send+"\n\n"); + Files.appendToFile(protocolFile, "DIG code send to reasoner:\n\n"+send+"\n\n"); // receive answer InputStream is = connection.getInputStream(); @@ -198,7 +198,7 @@ // } if(protocolFile != null) - Files.appendFile(protocolFile, "DIG code received from reasoner:\n\n"+answer+"\n\n"); + Files.appendToFile(protocolFile, "DIG code received from reasoner:\n\n"+answer+"\n\n"); return answer.toString(); } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/Files.java =================================================================== --- 
trunk/components-core/src/main/java/org/dllearner/utilities/Files.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/utilities/Files.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -186,7 +186,7 @@ * @param content * Content of the file. */ - public static void appendFile(File file, String content) { + public static void appendToFile(File file, String content) { try { FileOutputStream fos = new FileOutputStream(file, true); fos.write(content.getBytes()); Modified: trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/components-core/src/main/java/org/dllearner/utilities/JamonMonitorLogger.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -168,7 +168,7 @@ public static void writeHTMLReport(String filename){ File jamonlog = new File(filename); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); + Files.appendToFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CrossValidation.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -275,7 +275,7 @@ private void outputWriter(String output) { if(writeToFile) { - Files.appendFile(outputFile, output +"\n"); + Files.appendToFile(outputFile, output +"\n"); System.out.println(output); } else { System.out.println(output); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -199,7 +199,7 @@ // restrict tested number of entities per type (only for testing purposes); // should be set to -1 in production mode - private int maxEntitiesPerType = -1; + int maxEntitiesPerType = -1; // number of axioms which will be learned/considered (only applies to // some learners) @@ -583,8 +583,11 @@ OWLNamedIndividual knowldegeBaseInd = f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getURL())); ax = f.getOWLClassAssertionAxiom(EnrichmentVocabulary.SPARQLEndpoint, knowldegeBaseInd); axioms.add(ax); - ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.defaultGraph, knowldegeBaseInd, f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getDefaultGraphURIs().iterator().next()))); - axioms.add(ax); + if(!ks.getEndpoint().getDefaultGraphURIs().isEmpty()) { + // TODO: only writes one default graph + ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.defaultGraph, knowldegeBaseInd, f.getOWLNamedIndividual(IRI.create(ks.getEndpoint().getDefaultGraphURIs().iterator().next()))); + axioms.add(ax); + } ax = f.getOWLObjectPropertyAssertionAxiom(EnrichmentVocabulary.hasInput, algorithmRunInd, knowldegeBaseInd); axioms.add(ax); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- 
trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -22,6 +22,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.PrintStream; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; @@ -31,6 +32,8 @@ import java.util.Map; import java.util.Map.Entry; +import javax.xml.ws.http.HTTPException; + import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; @@ -41,11 +44,14 @@ import org.dllearner.kb.SparqlEndpointKS; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlQuery; +import org.dllearner.utilities.Files; import org.semanticweb.owlapi.model.OWLAxiom; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Model; +import com.hp.hpl.jena.sparql.engine.http.QueryExceptionHTTP; +import com.hp.hpl.jena.sparql.resultset.ResultSetException; /** * Enriches all of the LOD cloud. @@ -124,19 +130,49 @@ // run enrichment SparqlEndpoint se = endpoint.getValue(); String name = endpoint.getKey(); + + File f = new File(baseDir + name + ".ttl"); + File log = new File(baseDir + name + ".log"); + System.out.println("Enriching " + name + " using " + se); Enrichment e = new Enrichment(se, null, threshold, nrOfAxiomsToLearn, useInference, false); - e.start(); - // save results to a file - SparqlEndpointKS ks = new SparqlEndpointKS(se); - List<AlgorithmRun> runs = e.getAlgorithmRuns(); - List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); - for(AlgorithmRun run : runs) { - axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); + + e.maxEntitiesPerType = 3; // hack for faster testing of endpoints + + boolean success = false; + // run enrichment script - we make a case distinguish to see which kind of problems we get + // (could be interesting for statistics later on) + try { + e.start(); + success = true; + } catch(StackOverflowError error) { + error.printStackTrace(new PrintStream(log)); + Files.appendToFile(log, "stack overflows could be caused by cycles in class hierarchies"); + error.printStackTrace(); + } catch(ResultSetException ex) { + ex.printStackTrace(new PrintStream(log)); + Files.appendToFile(log, ex.getMessage()); + ex.printStackTrace(); + } catch(QueryExceptionHTTP ex) { + ex.printStackTrace(new PrintStream(log)); + Files.appendToFile(log, ex.getMessage()); + ex.printStackTrace(); + } +// catch(Exception ex) { +// System.out.println("class of exception: " + ex.getClass()); +// } + + // save results to a file (TODO: check if enrichment format + if(success) { + SparqlEndpointKS ks = new SparqlEndpointKS(se); + List<AlgorithmRun> runs = e.getAlgorithmRuns(); + List<OWLAxiom> axioms = new LinkedList<OWLAxiom>(); + for(AlgorithmRun run : runs) { + axioms.addAll(e.toRDF(run.getAxioms(), run.getAlgorithm(), run.getParameters(), ks)); + } + Model model = e.getModel(axioms); + model.write(new FileOutputStream(f), "TURTLE"); } - Model model = e.getModel(axioms); - File f = new File(baseDir + name + ".ttl"); - model.write(new FileOutputStream(f), "TURTLE"); } } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Start.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Start.java 
2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Start.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -216,7 +216,7 @@ // write JaMON report in HTML file File jamonlog = new File("log/jamon.html"); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); + Files.appendToFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); } /** Modified: trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Alzheimer.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -766,7 +766,7 @@ content.append("-\"" + getIndividual(key) + "\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static void generateConfFile(File file) { @@ -778,7 +778,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/alzheimer/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } // returns URI including quotationsmark (need for KBparser) Modified: trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/BreastTissue.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -296,7 +296,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/breasttissue/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file, HashMap<String, Integer> patients, int i) { @@ -311,7 +311,7 @@ } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Carcinogenesis.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -268,7 +268,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/carcinogenesis/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(confTrainFile, confHeader); + Files.appendToFile(confTrainFile, confHeader); // generating training examples File trainingFilePositives = new File(prologDirectory + "train.f"); @@ -291,7 +291,7 @@ appendNegExamples(confTrainFile, negPTE1Examples); if(createPTE1Conf) { Files.clearFile(confPTE1File); - Files.appendFile(confPTE1File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); + Files.appendToFile(confPTE1File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); appendPosExamples(confPTE1File, posPTE1Examples); appendNegExamples(confPTE1File, negPTE1Examples); } @@ -300,8 +300,8 @@ if(createPTE2Conf) { File confPTE2File = new File("examples/carcinogenesis/testpte2.conf"); 
Files.clearFile(confPTE2File); - Files.appendFile(confPTE2File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); - Files.appendFile(confPTE2File, getPTE2Examples()); + Files.appendToFile(confPTE2File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); + Files.appendToFile(confPTE2File, getPTE2Examples()); } } @@ -492,7 +492,7 @@ else content.append("-\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } public static void appendNegExamples(File file, List<Individual> examples) { @@ -503,7 +503,7 @@ else content.append("+\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getAtomClass(String element, String atomType) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Cardiotocography.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -546,7 +546,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/cardiotocography/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file, HashMap<String, Integer> patients, int i) { @@ -561,7 +561,7 @@ } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Heart.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Heart.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Heart.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -307,7 +307,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/heart/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file) { @@ -321,7 +321,7 @@ content.append("-\"" + getIndividual(key) + "\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Mammographic.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -255,7 +255,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/mammographic/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file) { @@ -269,7 +269,7 @@ content.append("-\"" + getIndividual(key) + "\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: 
trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/MonogenicDiseases.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -361,61 +361,61 @@ String pt = rs.getString("phenotype"); if ((pt.toLowerCase().contains("polymorphism"))&&( neg_count<=negEx )) { if (writeAlephFiles) { - Files.appendFile(badFile, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(badFile, "deleterious(id"+mutationID+").\n"); } if (writeYYFiles) { - Files.appendFile(yybadFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); + Files.appendToFile(yybadFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); } if (cvAleph){ switch (kn) { case 1: aneg++; - Files.appendFile(split1n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split1n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 2: aneg++; - Files.appendFile(split2n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split2n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 3: aneg++; - Files.appendFile(split3n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split3n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 4: aneg++; - Files.appendFile(split4n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split4n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 5: aneg++; - Files.appendFile(split5n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split5n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 6: aneg++; - Files.appendFile(split6n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split6n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 7: aneg++; - Files.appendFile(split7n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split7n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 8: aneg++; - Files.appendFile(split8n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split8n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 9: aneg++; - Files.appendFile(split9n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split9n, "deleterious(id"+mutationID+").\n"); if (aneg == neps) {aneg = 0; kn++;} break; case 10: aneg++; - Files.appendFile(split10n, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split10n, "deleterious(id"+mutationID+").\n"); // if (aneg == neps) {aneg = 0; kn++;} break; // case 11: // without comment its round negExamples / 10 @@ -427,61 +427,61 @@ } if ((!pt.toLowerCase().contains("polymorphism"))&& ( pos_count<=posEx)) { if (writeAlephFiles) { - Files.appendFile(posFile, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(posFile, "deleterious(id"+mutationID+").\n"); } if (writeYYFiles) { - Files.appendFile(yyposFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); + Files.appendToFile(yyposFile, "http://example.com/mutation/mutation#mutation"+mutationID+"\n"); } if (cvAleph){ switch (kp) { case 1: apos++; - Files.appendFile(split1f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split1f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; 
kp++;} break; case 2: apos++; - Files.appendFile(split2f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split2f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 3: apos++; - Files.appendFile(split3f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split3f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 4: apos++; - Files.appendFile(split4f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split4f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 5: apos++; - Files.appendFile(split5f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split5f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 6: apos++; - Files.appendFile(split6f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split6f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 7: apos++; - Files.appendFile(split7f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split7f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 8: apos++; - Files.appendFile(split8f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split8f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 9: apos++; - Files.appendFile(split9f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split9f, "deleterious(id"+mutationID+").\n"); if (apos == peps) {apos = 0; kp++;} break; case 10: apos++; - Files.appendFile(split10f, "deleterious(id"+mutationID+").\n"); + Files.appendToFile(split10f, "deleterious(id"+mutationID+").\n"); // if (apos == peps) {apos = 0; kp++;} break; // case 11: // without comments its round to posExamples / 10 @@ -712,7 +712,7 @@ confHeader += "\n"; } - Files.appendFile(confFile, confHeader); + Files.appendToFile(confFile, confHeader); if(!generatePosExampleClass) { MonogenicDiseases.appendPosExamples(confFile, posExamples); MonogenicDiseases.appendNegExamples(confFile, negExamples); @@ -751,7 +751,7 @@ else content.append("-\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } public static void appendNegExamples(File file, List<Individual> examples) { @@ -762,6 +762,6 @@ else content.append("+\""+example.toString()+"\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } } Modified: trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Mutagenesis.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -243,7 +243,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/mutagenesis/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generatePositiveExamples(String fileName) @@ -439,7 +439,7 @@ else content.append("-\"" + example.toString() + "\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } public static void appendNegExamples(File file, List<Individual> examples) { @@ -450,7 +450,7 @@ else content.append("+\"" + example.toString() + "\"\n"); } - Files.appendFile(file, 
content.toString()); + Files.appendToFile(file, content.toString()); } private static String getAtomClass(String element, String atomType) { @@ -583,7 +583,7 @@ for (String negEx : negativeExamples) { content.append("-\"" + getIndividual(negEx) + "\"\n"); } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static BooleanDatatypePropertyAssertion getBooleanDatatypePropertyAssertion( Modified: trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Parkinsons.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -283,7 +283,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/parkinsons/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(file, confHeader); + Files.appendToFile(file, confHeader); } private static void generateExamples(File file) { @@ -298,7 +298,7 @@ } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } private static String getURI(String name) { Modified: trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/examples/Suramin.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -171,7 +171,7 @@ confHeader += "refexamples.writeSearchTree = false;\n"; confHeader += "refexamples.searchTreeFile = \"log/suramin/searchTree.log\";\n"; confHeader += "\n"; - Files.appendFile(confTrainFile, confHeader); + Files.appendToFile(confTrainFile, confHeader); appendExamples(confTrainFile, posExamples); } @@ -382,7 +382,7 @@ content.append("-\""+getIndividual(compound.toString())+"\"\n"); } } - Files.appendFile(file, content.toString()); + Files.appendToFile(file, content.toString()); } } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/scripts/CrossValidation.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -374,7 +374,7 @@ private void outputWriter(String output) { if(writeToFile) { - Files.appendFile(outputFile, output +"\n"); + Files.appendToFile(outputFile, output +"\n"); System.out.println(output); } else { System.out.println(output); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBible.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -79,8 +79,8 @@ initLogger(); logger.info("Start"); Files.clearFile(file); - Files.appendFile(file, "neg Ex random: "+RANDOMNEGATIVES+"\n"); - Files.appendFile(file, "negfactor : "+NEGFACTOR+"\n"); + Files.appendToFile(file, "neg Ex random: "+RANDOMNEGATIVES+"\n"); + Files.appendToFile(file, "negfactor : "+NEGFACTOR+"\n"); //String fileURL = new File(ontologyFile).toURI().toString(); @@ -113,7 +113,7 @@ for (NamedClass target : classesToRelearn) { - 
Files.appendFile(file,"now learning: "+target+"\n"); + Files.appendToFile(file,"now learning: "+target+"\n"); waitForInput(); positiveEx.clear(); @@ -138,7 +138,7 @@ if(negativeEx.size()<0) { System.out.println(target); waitForInput(); - Files.appendFile(file, "\tSKIPPED negEX "+negativeEx+"\n"); + Files.appendToFile(file, "\tSKIPPED negEX "+negativeEx+"\n"); continue; } // reasoningService.prepareSubsumptionHierarchy(); @@ -151,7 +151,7 @@ e.printStackTrace(); } waitForInput(); - Files.appendFile(file, "*************\n"); + Files.appendToFile(file, "*************\n"); } Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java 2011-12-09 12:54:27 UTC (rev 3494) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java 2011-12-09 13:52:45 UTC (rev 3495) @@ -237,7 +237,7 @@ public static void writeJamonLog(String filename){ File jamonlog = new File(filename); Files.createFile(jamonlog, MonitorFactory.getReport()); - Files.appendFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); + Files.appendToFile(jamonlog, "<xmp>\n"+JamonMonitorLogger.getStringForAllSortedByLabel()); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
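Note on the rename above: it is a mechanical switch of the append helper used by the example generators; every call site passes a File and a String and expects the text to be added to the end of the file. For orientation, a minimal sketch of such a helper in plain java.io terms follows. The real org.dllearner.utilities.Files class may differ in error handling and encoding, so the method body is illustrative only.

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

public class FilesSketch {

    // Appends content to the end of the file, creating the file if it does not exist.
    public static void appendToFile(File file, String content) {
        try {
            BufferedWriter writer = new BufferedWriter(new FileWriter(file, true));
            try {
                writer.write(content);
            } finally {
                writer.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}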
From: <lor...@us...> - 2012-01-03 08:51:00
|
Revision: 3522 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3522&view=rev Author: lorenz_b Date: 2012-01-03 08:50:54 +0000 (Tue, 03 Jan 2012) Log Message: ----------- Enabled separate cache for each endpoint to avoid concurrency problems when running several threads in parallel. Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java 2012-01-03 04:33:55 UTC (rev 3521) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SPARQLTasks.java 2012-01-03 08:50:54 UTC (rev 3522) @@ -33,6 +33,7 @@ import org.dllearner.utilities.datastructures.StringTuple; import org.dllearner.utilities.owl.OWLVocabulary; +import com.clarkparsia.owlapiv3.OWL; import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFactory; @@ -710,6 +711,9 @@ } } + //remove trivial classes + classes.remove(OWL.Nothing.toStringID()); + classes.remove(OWL.Thing.toStringID()); return classes; } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2012-01-03 04:33:55 UTC (rev 3521) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2012-01-03 08:50:54 UTC (rev 3522) @@ -29,6 +29,7 @@ import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; +import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; import java.math.BigInteger; import java.net.MalformedURLException; @@ -36,6 +37,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.net.URL; +import java.net.URLEncoder; import java.security.SecureRandom; import java.text.DecimalFormat; import java.util.ArrayList; @@ -104,6 +106,7 @@ import org.dllearner.core.owl.ObjectProperty; import org.dllearner.core.owl.SubClassAxiom; import org.dllearner.kb.SparqlEndpointKS; +import org.dllearner.kb.sparql.ExtractionDBCache; import org.dllearner.kb.sparql.SPARQLTasks; import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.kb.sparql.SparqlKnowledgeSource; @@ -208,6 +211,8 @@ private boolean useInference; private SPARQLReasoner reasoner; + private ExtractionDBCache cache; + private String cacheDir = "cache"; // lists of algorithms to apply private List<Class<? extends AxiomLearningAlgorithm>> objectPropertyAlgorithms; @@ -234,6 +239,14 @@ this.nrOfAxiomsToLearn = nrOfAxiomsToLearn; this.useInference = useInference; + try { + cacheDir = "cache" + File.separator + URLEncoder.encode(se.getURL().toString(), "UTF-8"); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + cache = new ExtractionDBCache(cacheDir); + objectPropertyAlgorithms = new LinkedList<Class<? 
extends AxiomLearningAlgorithm>>(); objectPropertyAlgorithms.add(DisjointObjectPropertyAxiomLearner.class); objectPropertyAlgorithms.add(EquivalentObjectPropertyAxiomLearner.class); @@ -282,7 +295,7 @@ // ks.setSupportsSPARQL_1_1(supportsSPARQL_1_1); if(useInference){ - reasoner = new SPARQLReasoner(ks); + reasoner = new SPARQLReasoner(ks, cache); System.out.print("Precomputing subsumption hierarchy ... "); long startTime = System.currentTimeMillis(); reasoner.prepareSubsumptionHierarchy(); @@ -332,7 +345,13 @@ runDataPropertyAlgorithms(ks, (DatatypeProperty) resource); } else if(resource instanceof NamedClass) { System.out.println(resource + " appears to be a class. Running appropriate algorithms.\n"); - runClassLearningAlgorithms(ks, (NamedClass) resource); + try { + runClassLearningAlgorithms(ks, (NamedClass) resource); + } catch (Exception e) { + System.out.println(e.getCause()); + } catch (Error e) { + System.out.println(e.getCause()); + } } else { throw new Error("The type " + resource.getClass() + " of resource " + resource + " cannot be handled by this enrichment tool."); } @@ -401,6 +420,7 @@ ks2.setDefaultGraphURIs(new TreeSet<String>(ks.getEndpoint().getDefaultGraphURIs())); ks2.setUseLits(false); ks2.setUseCacheDatabase(true); + ks2.setCacheDir(cacheDir); ks2.setRecursionDepth(2); ks2.setCloseAfterRecursion(true); ks2.setSaveExtractedFragment(true); Modified: trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2012-01-03 04:33:55 UTC (rev 3521) +++ trunk/interfaces/src/main/java/org/dllearner/cli/GlobalEnrichment.java 2012-01-03 08:50:54 UTC (rev 3522) @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; @@ -74,7 +75,7 @@ //parameters for thread pool //Parallel running Threads(Executor) on System - private static int corePoolSize = 10; + private static int corePoolSize = 1; //Maximum Threads allowed in Pool private static int maximumPoolSize = 20; //Keep alive time for waiting threads for jobs(Runnable) @@ -103,7 +104,7 @@ Logger.getRootLogger().addAppender(consoleAppender); // get all SPARQL endpoints and their graphs - the key is a name-identifier - Map<String,SparqlEndpoint> endpoints = new HashMap<String,SparqlEndpoint>(); + Map<String,SparqlEndpoint> endpoints = new TreeMap<String,SparqlEndpoint>(); String query = ""; query += "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \n"; @@ -130,7 +131,7 @@ ResultSet rs = sq.send(); while(rs.hasNext()) { QuerySolution qs = rs.next(); - String endpoint = qs.get("endpoint").toString(); + String endpoint = qs.get("endpoint").toString();System.out.println(endpoint); String shortName = qs.get("shortName").toString(); endpoints.put(shortName, new SparqlEndpoint(new URL(endpoint))); } @@ -170,18 +171,20 @@ // run enrichment script - we make a case distinguish to see which kind of problems we get // (could be interesting for statistics later on) try { - e.start(); - success = true; - } catch(StackOverflowError error) { try { + e.start(); + success = true; + } catch (Exception ex){ + ex.printStackTrace(new PrintStream(log)); + } catch(StackOverflowError error) { error.printStackTrace(new PrintStream(log)); - } catch (FileNotFoundException e1) { - // TODO Auto-generated catch block - 
e1.printStackTrace(); + Files.appendToFile(log, "stack overflows could be caused by cycles in class hierarchies"); + error.printStackTrace(); } - Files.appendToFile(log, "stack overflows could be caused by cycles in class hierarchies"); - error.printStackTrace(); - } catch(ResultSetException ex) { + } catch (FileNotFoundException e2) { + e2.printStackTrace(); + } + /*catch(ResultSetException ex) { try { ex.printStackTrace(new PrintStream(log)); } catch (FileNotFoundException e1) { @@ -202,7 +205,7 @@ } catch(Exception ex) { System.out.println("class of exception: " + ex.getClass()); - } + }*/ // save results to a file (TODO: check if enrichment format if(success) { This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
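Note on the revision above: its core is that the cache directory is derived from the endpoint URL, so several enrichment threads running against different endpoints no longer write into the same cache database. A stripped-down sketch of that directory naming follows; only the URLEncoder call mirrors the diff, while the demo class and main method around it are illustrative.

import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;

public class PerEndpointCacheDir {

    // Builds a cache directory name that is unique per endpoint URL, so that
    // concurrent threads working on different endpoints use separate caches.
    public static String cacheDirFor(URL endpointURL) {
        try {
            return "cache" + File.separator + URLEncoder.encode(endpointURL.toString(), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed to be supported; fall back to the shared directory.
            return "cache";
        }
    }

    public static void main(String[] args) throws Exception {
        // Prints cache/http%3A%2F%2Fdbpedia.org%2Fsparql on Unix-like systems.
        System.out.println(cacheDirFor(new URL("http://dbpedia.org/sparql")));
    }
}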
From: <jen...@us...> - 2012-01-05 17:42:22
|
Revision: 3530 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3530&view=rev Author: jenslehmann Date: 2012-01-05 17:42:15 +0000 (Thu, 05 Jan 2012) Log Message: ----------- start to re-integrate family benchmark Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java trunk/test/family-benchmark/Aunt.conf Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2012-01-05 15:15:28 UTC (rev 3529) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/celoe/CELOE.java 2012-01-05 17:42:15 UTC (rev 3530) @@ -178,11 +178,11 @@ @ConfigOption(name = "maxClassExpressionTestsAfterImprovement", defaultValue="0", description = "The maximum number of candidate hypothesis the algorithm is allowed after an improvement in accuracy (0 = no limit). The algorithm will stop afterwards. (The real number of tests can be slightly higher, because this criterion usually won't be checked after each single test.)") private int maxClassExpressionTestsAfterImprovement = 0; - @ConfigOption(defaultValue = "0", name = "maxExecutionTimeInSeconds", description = "maximum execution of the algorithm in seconds") - private int maxExecutionTimeInSeconds = 0; + @ConfigOption(defaultValue = "10", name = "maxExecutionTimeInSeconds", description = "maximum execution of the algorithm in seconds") + private int maxExecutionTimeInSeconds = 10; - @ConfigOption(defaultValue = "10", name = "maxExecutionTimeInSecondsAfterImprovement", description = "maximum execution of the algorithm in seconds") - private int maxExecutionTimeInSecondsAfterImprovement = 10; + @ConfigOption(defaultValue = "0", name = "maxExecutionTimeInSecondsAfterImprovement", description = "maximum execution of the algorithm in seconds") + private int maxExecutionTimeInSecondsAfterImprovement = 0; @ConfigOption(name = "terminateOnNoiseReached", defaultValue="false", description="specifies whether to terminate when noise criterion is met") private boolean terminateOnNoiseReached = false; @@ -219,6 +219,10 @@ @Override public void init() throws ComponentInitException { + if(maxExecutionTimeInSeconds != 0) { + maxExecutionTimeInSeconds = Math.min(maxExecutionTimeInSeconds, maxExecutionTimeInSecondsAfterImprovement); + } + // compute used concepts/roles from allowed/ignored // concepts/roles Set<NamedClass> usedConcepts; Modified: trunk/test/family-benchmark/Aunt.conf =================================================================== --- trunk/test/family-benchmark/Aunt.conf 2012-01-05 15:15:28 UTC (rev 3529) +++ trunk/test/family-benchmark/Aunt.conf 2012-01-05 17:42:15 UTC (rev 3530) @@ -1,88 +1,115 @@ -import("family-benchmark.owl"); -// TODO All Quantifier causes "wrong" areas of the search space to be explored -ocel.useAllConstructor = false; +// declare some prefixes to use as abbreviations +prefixes = [ ("ex","http://www.benchmark.org/family#") ] -+"http://www.benchmark.org/family#F2F14" -+"http://www.benchmark.org/family#F2F12" -+"http://www.benchmark.org/family#F2F19" -+"http://www.benchmark.org/family#F2F26" -+"http://www.benchmark.org/family#F2F28" -+"http://www.benchmark.org/family#F2F36" -+"http://www.benchmark.org/family#F3F52" -+"http://www.benchmark.org/family#F3F53" -+"http://www.benchmark.org/family#F5F62" -+"http://www.benchmark.org/family#F6F72" -+"http://www.benchmark.org/family#F6F79" 
-+"http://www.benchmark.org/family#F6F77" -+"http://www.benchmark.org/family#F6F86" -+"http://www.benchmark.org/family#F6F91" -+"http://www.benchmark.org/family#F6F84" -+"http://www.benchmark.org/family#F6F96" -+"http://www.benchmark.org/family#F6F101" -+"http://www.benchmark.org/family#F6F93" -+"http://www.benchmark.org/family#F7F114" -+"http://www.benchmark.org/family#F7F106" -+"http://www.benchmark.org/family#F7F116" -+"http://www.benchmark.org/family#F7F119" -+"http://www.benchmark.org/family#F7F126" -+"http://www.benchmark.org/family#F7F121" -+"http://www.benchmark.org/family#F9F148" -+"http://www.benchmark.org/family#F9F150" -+"http://www.benchmark.org/family#F9F143" -+"http://www.benchmark.org/family#F9F152" -+"http://www.benchmark.org/family#F9F154" -+"http://www.benchmark.org/family#F9F141" -+"http://www.benchmark.org/family#F9F160" -+"http://www.benchmark.org/family#F9F163" -+"http://www.benchmark.org/family#F9F158" -+"http://www.benchmark.org/family#F9F168" -+"http://www.benchmark.org/family#F10F174" -+"http://www.benchmark.org/family#F10F179" -+"http://www.benchmark.org/family#F10F181" -+"http://www.benchmark.org/family#F10F192" -+"http://www.benchmark.org/family#F10F193" -+"http://www.benchmark.org/family#F10F186" -+"http://www.benchmark.org/family#F10F195" +// knowledge source definition +ks.type = "OWL File" +ks.fileName = "family-benchmark.owl" --"http://www.benchmark.org/family#F6M99" --"http://www.benchmark.org/family#F10F200" --"http://www.benchmark.org/family#F9F156" --"http://www.benchmark.org/family#F6M69" --"http://www.benchmark.org/family#F2F15" --"http://www.benchmark.org/family#F6M100" --"http://www.benchmark.org/family#F8F133" --"http://www.benchmark.org/family#F3F48" --"http://www.benchmark.org/family#F2F30" --"http://www.benchmark.org/family#F4F55" --"http://www.benchmark.org/family#F6F74" --"http://www.benchmark.org/family#F10M199" --"http://www.benchmark.org/family#F7M104" --"http://www.benchmark.org/family#F9M146" --"http://www.benchmark.org/family#F6M71" --"http://www.benchmark.org/family#F2F22" --"http://www.benchmark.org/family#F2M13" --"http://www.benchmark.org/family#F9F169" --"http://www.benchmark.org/family#F5F65" --"http://www.benchmark.org/family#F6M81" --"http://www.benchmark.org/family#F7M131" --"http://www.benchmark.org/family#F7F129" --"http://www.benchmark.org/family#F7M107" --"http://www.benchmark.org/family#F10F189" --"http://www.benchmark.org/family#F8F135" --"http://www.benchmark.org/family#F8M136" --"http://www.benchmark.org/family#F10M188" --"http://www.benchmark.org/family#F9F164" --"http://www.benchmark.org/family#F7F118" --"http://www.benchmark.org/family#F2F10" --"http://www.benchmark.org/family#F6F97" --"http://www.benchmark.org/family#F7F111" --"http://www.benchmark.org/family#F9M151" --"http://www.benchmark.org/family#F4M59" --"http://www.benchmark.org/family#F2M37" --"http://www.benchmark.org/family#F1M1" --"http://www.benchmark.org/family#F9M142" --"http://www.benchmark.org/family#F4M57" --"http://www.benchmark.org/family#F9M170" --"http://www.benchmark.org/family#F5M66" --"http://www.benchmark.org/family#F9F145" +// reasoner +reasoner.type = "fast instance checker" +reasoner.sources = { ks } + +// learning problem +lp.type = "posNegStandard" +lp.positiveExamples = { +"ex:F2F14", +"ex:F2F12", +"ex:F2F19", +"ex:F2F26", +"ex:F2F28", +"ex:F2F36", +"ex:F3F52", +"ex:F3F53", +"ex:F5F62" +,"ex:F6F72" +,"ex:F6F79" +,"ex:F6F77" +,"ex:F6F86" +,"ex:F6F91" +,"ex:F6F84" +,"ex:F6F96" +,"ex:F6F101" +,"ex:F6F93" +,"ex:F7F114" +,"ex:F7F106" 
+,"ex:F7F116" +,"ex:F7F119" +,"ex:F7F126" +,"ex:F7F121" +,"ex:F9F148" +,"ex:F9F150" +,"ex:F9F143" +,"ex:F9F152" +,"ex:F9F154" +,"ex:F9F141" +,"ex:F9F160" +,"ex:F9F163" +,"ex:F9F158" +,"ex:F9F168" +,"ex:F10F174" +,"ex:F10F179" +,"ex:F10F181" +,"ex:F10F192" +,"ex:F10F193" +,"ex:F10F186" +,"ex:F10F195" +} + +lp.negativeExamples = { +"ex:F6M99" +,"ex:F10F200" +,"ex:F9F156" +,"ex:F6M69" +,"ex:F2F15" +,"ex:F6M100" +,"ex:F8F133" +,"ex:F3F48" +,"ex:F2F30" +,"ex:F4F55" +,"ex:F6F74" +,"ex:F10M199" +,"ex:F7M104" +,"ex:F9M146" +,"ex:F6M71" +,"ex:F2F22" +,"ex:F2M13" +,"ex:F9F169" +,"ex:F5F65" +,"ex:F6M81" +,"ex:F7M131" +,"ex:F7F129" +,"ex:F7M107" +,"ex:F10F189" +,"ex:F8F135" +,"ex:F8M136" +,"ex:F10M188" +,"ex:F9F164" +,"ex:F7F118" +,"ex:F2F10" +,"ex:F6F97" +,"ex:F7F111" +,"ex:F9M151" +,"ex:F4M59" +,"ex:F2M37" +,"ex:F1M1" +,"ex:F9M142" +,"ex:F4M57" +,"ex:F9M170" +,"ex:F5M66" +,"ex:F9F145" +} + +op.type = "rho" +op.useAllConstructor = false +op.useCardinalityRestrictions = false + +// create learning algorithm to run +alg.type = "celoe" +alg.operator = op +alg.maxExecutionTimeInSecondsAfterImprovement = 200 + + + + + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-02-24 08:59:17
|
Revision: 3585 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3585&view=rev Author: lorenz_b Date: 2012-02-24 08:59:06 +0000 (Fri, 24 Feb 2012) Log Message: ----------- Updated SOLR dependency. Modified Paths: -------------- trunk/components-ext/pom.xml trunk/pom.xml Modified: trunk/components-ext/pom.xml =================================================================== --- trunk/components-ext/pom.xml 2012-02-23 15:16:14 UTC (rev 3584) +++ trunk/components-ext/pom.xml 2012-02-24 08:59:06 UTC (rev 3585) @@ -164,7 +164,7 @@ <dependency> <groupId>org.annolab.tt4j</groupId> <artifactId>org.annolab.tt4j</artifactId> - <version>1.0.14</version> + <version>1.0.16</version> </dependency> <dependency> <groupId>org.ini4j</groupId> Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-02-23 15:16:14 UTC (rev 3584) +++ trunk/pom.xml 2012-02-24 08:59:06 UTC (rev 3585) @@ -127,7 +127,7 @@ <dependency> <groupId>org.apache.solr</groupId> <artifactId>solr-core</artifactId> - <version>3.3.0</version> + <version>3.5.0</version> <exclusions> <exclusion> <groupId>commons-logging</groupId> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <lor...@us...> - 2012-03-05 12:12:47
|
Revision: 3604 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3604&view=rev Author: lorenz_b Date: 2012-03-05 12:12:37 +0000 (Mon, 05 Mar 2012) Log Message: ----------- Updated JENA libs. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java trunk/pom.xml Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/components-core/pom.xml 2012-03-05 12:12:37 UTC (rev 3604) @@ -93,12 +93,15 @@ <dependency> <groupId>org.aksw.commons</groupId> <artifactId>sparql</artifactId> - <version>0.2-SNAPSHOT</version> <exclusions> <exclusion> <artifactId>slf4j-api</artifactId> <groupId>org.slf4j</groupId> </exclusion> + <exclusion> + <artifactId>arq</artifactId> + <groupId>com.hp.hpl.jena</groupId> + </exclusion> </exclusions> </dependency> @@ -125,18 +128,10 @@ <artifactId>log4j</artifactId> </dependency> - <!-- Available via central, we use the latest with minor mods to DL Learner - source (IE Dig related code) --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>jena</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> + <groupId>org.apache.jena</groupId> + <artifactId>jena-arq</artifactId> + </dependency> <!--JSON is in Central --> <dependency> @@ -180,17 +175,10 @@ </dependency> - <!--JENA ARQ is in central - we use the latest --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>arq</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-api</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> + <groupId>org.apache.jena</groupId> + <artifactId>jena-core</artifactId> + </dependency> <dependency> <groupId>junit</groupId> @@ -230,7 +218,7 @@ <dependency> <groupId>commons-codec</groupId> <artifactId>commons-codec</artifactId> - <version>20041127.091804</version> + <version>1.5</version> </dependency> <dependency> <groupId>org.springframework</groupId> @@ -245,5 +233,10 @@ <artifactId>jwnl</artifactId> <version>1.4.1.RC2</version> </dependency> + <dependency> + <groupId>org.apache.lucene</groupId> + <artifactId>lucene-core</artifactId> + <version>3.5.0</version> + </dependency> </dependencies> </project> Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/ExtendedQueryEngineHTTP.java 2012-03-05 12:12:37 UTC (rev 3604) @@ -19,24 +19,32 @@ package org.dllearner.kb.sparql; -import com.hp.hpl.jena.query.*; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.hp.hpl.jena.query.ARQ; +import com.hp.hpl.jena.query.Dataset; +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.QuerySolution; +import com.hp.hpl.jena.query.ResultSet; +import 
com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.sparql.engine.http.HttpParams; import com.hp.hpl.jena.sparql.engine.http.Params; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; +import com.hp.hpl.jena.sparql.graph.GraphFactory; import com.hp.hpl.jena.sparql.resultset.XMLInput; import com.hp.hpl.jena.sparql.util.Context; -import com.hp.hpl.jena.sparql.util.graph.GraphFactory; import com.hp.hpl.jena.util.FileManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.concurrent.TimeUnit; - /** * Created by Claus Stadler * Date: Oct 25, 2010 @@ -364,6 +372,11 @@ public Dataset getDataset() { return null; } + + @Override + public Query getQuery() { + return QueryFactory.create(queryString); + } } /* Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/HttpQuery.java 2012-03-05 12:12:37 UTC (rev 3604) @@ -37,12 +37,11 @@ import java.util.List; import java.util.Map; -import org.openjena.atlas.lib.Base64; +import org.apache.commons.codec.binary.Base64; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.hp.hpl.jena.query.ARQ; -import com.hp.hpl.jena.sdb.util.Pair; import com.hp.hpl.jena.shared.JenaException; import com.hp.hpl.jena.sparql.ARQInternalErrorException; import com.hp.hpl.jena.sparql.engine.http.HttpParams; @@ -293,7 +292,7 @@ // Build string, get as UTF-8, bytes, translate to base 64. StringBuffer x = new StringBuffer() ; byte b[] = x.append(user).append(":").append(password).toString().getBytes("UTF-8") ; - String y = Base64.encodeBytes(b) ; + String y = Base64.encodeBase64String(b); httpConnection.setRequestProperty("Authorization", "Basic "+y) ; // Overwrite any password details we copied. // Still leaves the copy in the HTTP connection. But this only basic auth. 
Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-03-01 09:32:01 UTC (rev 3603) +++ trunk/pom.xml 2012-03-05 12:12:37 UTC (rev 3604) @@ -162,9 +162,9 @@ <!--Available via central, we use the latest with minor mods to DL Learner source (IE Dig related code) --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>jena</artifactId> - <version>2.6.4</version> + <groupId>org.apache.jena</groupId> + <artifactId>jena-core</artifactId> + <version>2.7.0-incubating</version> </dependency> <!--SwingX is in central --> <dependency> @@ -221,11 +221,10 @@ </dependency> - <!--JENA ARQ is in central - we use the latest --> <dependency> - <groupId>com.hp.hpl.jena</groupId> - <artifactId>arq</artifactId> - <version>2.8.8</version> + <groupId>org.apache.jena</groupId> + <artifactId>jena-arq</artifactId> + <version>2.9.0-incubating</version> </dependency> <!--Junits --> @@ -302,7 +301,7 @@ <dependency> <groupId>org.aksw.commons</groupId> <artifactId>sparql</artifactId> - <version>${aksw.version}</version> + <version>0.2-SNAPSHOT</version> </dependency> <dependency> <groupId>org.aksw.commons</groupId> @@ -542,6 +541,13 @@ <name>Semanticscience projects</name> <url>http://s1.semanticscience.org:8080/nexus/content/groups/public/</url> </repository> + <repository> + <id>apache-repo-releases</id> + <url>https://repository.apache.org/content/repositories/releases/</url> + <releases> + <enabled>true</enabled> + </releases> +</repository> </repositories> <pluginRepositories> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
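Note on the revision above: one detail of the Jena/commons-codec migration is the basic-auth header in HttpQuery, which now uses Base64.encodeBase64String from commons-codec instead of the openjena Base64 helper. A self-contained sketch of that header construction follows, assuming commons-codec 1.5 as declared in the updated components-core pom (1.5 encodes to a single line, which is what an HTTP header needs); the class name and credential values are placeholders.

import java.io.UnsupportedEncodingException;

import org.apache.commons.codec.binary.Base64;

public class BasicAuthHeader {

    // Builds the value of the HTTP "Authorization" header the same way the
    // patched HttpQuery does: base64("user:password") prefixed with "Basic ".
    public static String build(String user, String password) throws UnsupportedEncodingException {
        byte[] bytes = (user + ":" + password).getBytes("UTF-8");
        return "Basic " + Base64.encodeBase64String(bytes);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(build("dllearner", "secret"));
    }
}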
From: <lor...@us...> - 2012-03-09 09:19:42
|
Revision: 3606 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3606&view=rev Author: lorenz_b Date: 2012-03-09 09:19:31 +0000 (Fri, 09 Mar 2012) Log Message: ----------- Updated JENA libs. Modified Paths: -------------- trunk/components-core/pom.xml trunk/pom.xml Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2012-03-07 14:50:29 UTC (rev 3605) +++ trunk/components-core/pom.xml 2012-03-09 09:19:31 UTC (rev 3606) @@ -102,6 +102,10 @@ <artifactId>arq</artifactId> <groupId>com.hp.hpl.jena</groupId> </exclusion> + <exclusion> + <artifactId>xercesImpl</artifactId> + <groupId>xerces</groupId> + </exclusion> </exclusions> </dependency> @@ -131,6 +135,12 @@ <dependency> <groupId>org.apache.jena</groupId> <artifactId>jena-arq</artifactId> + <exclusions> + <exclusion> + <artifactId>xercesImpl</artifactId> + <groupId>xerces</groupId> + </exclusion> + </exclusions> </dependency> <!--JSON is in Central --> @@ -178,6 +188,12 @@ <dependency> <groupId>org.apache.jena</groupId> <artifactId>jena-core</artifactId> + <exclusions> + <exclusion> + <artifactId>xercesImpl</artifactId> + <groupId>xerces</groupId> + </exclusion> + </exclusions> </dependency> <dependency> @@ -236,7 +252,6 @@ <dependency> <groupId>org.apache.lucene</groupId> <artifactId>lucene-core</artifactId> - <version>3.5.0</version> </dependency> </dependencies> </project> Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-03-07 14:50:29 UTC (rev 3605) +++ trunk/pom.xml 2012-03-09 09:19:31 UTC (rev 3606) @@ -164,7 +164,7 @@ <dependency> <groupId>org.apache.jena</groupId> <artifactId>jena-core</artifactId> - <version>2.7.0-incubating</version> + <version>2.7.1-incubating-SNAPSHOT</version> </dependency> <!--SwingX is in central --> <dependency> @@ -197,7 +197,7 @@ <dependency> <groupId>org.apache.lucene</groupId> <artifactId>lucene-core</artifactId> - <version>2.9.3</version> + <version>3.5.0</version> </dependency> <dependency> @@ -224,7 +224,7 @@ <dependency> <groupId>org.apache.jena</groupId> <artifactId>jena-arq</artifactId> - <version>2.9.0-incubating</version> + <version>2.9.1-incubating-SNAPSHOT</version> </dependency> <!--Junits --> @@ -306,7 +306,7 @@ <dependency> <groupId>org.aksw.commons</groupId> <artifactId>model</artifactId> - <version>${aksw.version}</version> + <version>0.2-SNAPSHOT</version> </dependency> <dependency> <groupId>org.aksw.commons</groupId> @@ -317,7 +317,7 @@ <dependency> <groupId>org.aksw.commons</groupId> <artifactId>util</artifactId> - <version>${aksw.version}</version> + <version>0.2-SNAPSHOT</version> </dependency> @@ -421,7 +421,7 @@ <dependency> <groupId>org.springframework</groupId> <artifactId>spring-context</artifactId> - <version>3.0.5.RELEASE</version> + <version>3.1.1.RELEASE</version> <exclusions> <exclusion> <groupId>commons-logging</groupId> @@ -432,7 +432,7 @@ <dependency> <groupId>org.springframework</groupId> <artifactId>spring-beans</artifactId> - <version>3.0.5.RELEASE</version> + <version>3.1.1.RELEASE</version> </dependency> <!--BEGIN Logging Dependencies--> @@ -541,13 +541,24 @@ <name>Semanticscience projects</name> <url>http://s1.semanticscience.org:8080/nexus/content/groups/public/</url> </repository> + <repository> - <id>apache-repo-releases</id> - <url>https://repository.apache.org/content/repositories/releases/</url> - <releases> - <enabled>true</enabled> - </releases> + <id>apache-repo-releases</id> + 
<url>https://repository.apache.org/content/repositories/releases/</url> + <releases> + <enabled>true</enabled> + </releases> </repository> + <repository> + <id>apache-repo-snapshots</id> + <url>https://repository.apache.org/content/repositories/snapshots/</url> + <releases> + <enabled>false</enabled> + </releases> + <snapshots> + <enabled>true</enabled> + </snapshots> + </repository> </repositories> <pluginRepositories> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2012-03-13 10:19:20
|
Revision: 3609 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3609&view=rev Author: jenslehmann Date: 2012-03-13 10:19:10 +0000 (Tue, 13 Mar 2012) Log Message: ----------- started new SPARQL component Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java Added Paths: ----------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java trunk/test/sparql/new_component/ trunk/test/sparql/new_component/AristotlePos.conf Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-03-12 04:28:22 UTC (rev 3608) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-03-13 10:19:10 UTC (rev 3609) @@ -79,6 +79,7 @@ "org.dllearner.kb.OWLFile", "org.dllearner.kb.SparqlEndpointKS", "org.dllearner.kb.sparql.SparqlKnowledgeSource", + "org.dllearner.kb.sparql.SparqlSimpleExtractor", "org.dllearner.learningproblems.PosNegLPStandard", "org.dllearner.learningproblems.FuzzyPosNegLPStandard", "org.dllearner.learningproblems.PosOnlyLP", Added: trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java (rev 0) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/SparqlSimpleExtractor.java 2012-03-13 10:19:10 UTC (rev 3609) @@ -0,0 +1,42 @@ +package org.dllearner.kb.sparql; + +import java.net.URL; + +import org.dllearner.core.ComponentAnn; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.config.ConfigOption; + +@ComponentAnn(name="efficient SPARQL fragment extractor", shortName="sparqls", version=0.1) +public class SparqlSimpleExtractor implements KnowledgeSource { + + @ConfigOption(name="endpointURL", description="URL of the SPARQL endpoint", required=true) + private URL endpointURL = null; + + public SparqlSimpleExtractor() { + + } + + @Override + public void init() throws ComponentInitException { + // TODO Auto-generated method stub + + } + + /** + * @param args + */ + public static void main(String[] args) { + // TODO Auto-generated method stub + + } + + public URL getEndpointURL() { + return endpointURL; + } + + public void setEndpointURL(URL endpointURL) { + this.endpointURL = endpointURL; + } + +} Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-12 04:28:22 UTC (rev 3608) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-13 10:19:10 UTC (rev 3609) @@ -26,12 +26,14 @@ import org.dllearner.core.ComponentInitException; import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.owl.*; import org.dllearner.kb.OWLAPIOntology; import org.dllearner.kb.OWLFile; import 
org.dllearner.kb.OWLOntologyKnowledgeSource; import org.dllearner.kb.sparql.SparqlKnowledgeSource; +import org.dllearner.kb.sparql.SparqlSimpleExtractor; import org.dllearner.utilities.owl.*; import org.semanticweb.HermiT.Reasoner.ReasonerFactory; import org.semanticweb.owlapi.apibinding.OWLManager; @@ -150,14 +152,14 @@ Set<OWLImportsDeclaration> directImports = new HashSet<OWLImportsDeclaration>(); - for (AbstractKnowledgeSource source : sources) { + for (KnowledgeSource source : sources) { if (source instanceof OWLOntologyKnowledgeSource) { ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); owlAPIOntologies.add(ontology); } - if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof OWLAPIOntology) { + if (source instanceof OWLFile || source instanceof SparqlKnowledgeSource || source instanceof SparqlSimpleExtractor || source instanceof OWLAPIOntology) { if (source instanceof OWLAPIOntology) { ontology = ((OWLAPIOntology) source).getOWLOntolgy(); @@ -167,6 +169,8 @@ ontology = ((SparqlKnowledgeSource) source).getOWLAPIOntology(); manager = ontology.getOWLOntologyManager(); owlAPIOntologies.add(ontology); + } else if(source instanceof SparqlSimpleExtractor) { + // TODO } directImports.addAll(ontology.getImportsDeclarations()); @@ -204,7 +208,7 @@ } else { //KB Files - KB kb = source.toKB(); + KB kb = ((AbstractKnowledgeSource)source).toKB(); if (!(source instanceof OWLOntologyKnowledgeSource)) { //Not sure if this will ever get hit, but leaving in for backward compatibility. Modified: trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java 2012-03-12 04:28:22 UTC (rev 3608) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OxtractorHelper.java 2012-03-13 10:19:10 UTC (rev 3609) @@ -57,8 +57,8 @@ */ public static void main(String[] args) { OxtractorHelper oh = new OxtractorHelper("http://live.dbpedia.org/sparql","http://dbpedia.org"); - System.out.println(oh.getInstances("http://dbpedia.org/resource/Category:Cities_in_Saxony")); - System.out.println(oh.getCategories("room")); +// System.out.println(oh.getInstances("http://dbpedia.org/resource/Category:Cities_in_Saxony")); + System.out.println(oh.getCategories("Kitchen")); } } Added: trunk/test/sparql/new_component/AristotlePos.conf =================================================================== --- trunk/test/sparql/new_component/AristotlePos.conf (rev 0) +++ trunk/test/sparql/new_component/AristotlePos.conf 2012-03-13 10:19:10 UTC (rev 3609) @@ -0,0 +1,49 @@ +/** + * Some people from Greece. 
+ * Note: DBpedia is always subject to change, solutions will change over time + + * Possible Solution: + * Theorist OR (Mathematician AND Physicist) + * + * This is the same as AristotlePosNeg.conf, but positives only + */ + +// SPARQL options +sparql.type = "sparqls" +sparql.endpointURL = "http://live.dbpedia.org/sparql" + +/* +sparql.defaultGraphURIs = {"http://dbpedia.org"} +sparql.recursionDepth = 1 +//TODOREFACTOR check if predefinedFilter works at all +//predefined filter (1 = YAGO based learning) +sparql.predefinedFilter = "YAGO" +sparql.instances = { +"http://dbpedia.org/resource/Democritus", +"http://dbpedia.org/resource/Zeno_of_Elea", +"http://dbpedia.org/resource/Plato", +"http://dbpedia.org/resource/Socrates", +"http://dbpedia.org/resource/Archytas", +"http://dbpedia.org/resource/Pythagoras", +"http://dbpedia.org/resource/Philolaus" +} +*/ + +reasoner.type = "fast instance checker" +reasoner.sources = {sparql} + +// we want to learn from positive examples only +lp.type = "positive only learning problem" +lp.positiveExamples = { +"http://dbpedia.org/resource/Archytas", +"http://dbpedia.org/resource/Pythagoras", +"http://dbpedia.org/resource/Philolaus" +} + +lp.reasoner = reasoner + +// we use the OCEL algorithm +alg.type = "ocel" +alg.reasoner = reasoner +alg.learningProblem = lp + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
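Note on the revision above: the new SparqlSimpleExtractor is still a stub (init() contains only a TODO), but it already registers with the component manager and exposes the endpoint URL as a required config option, which is what the added AristotlePos.conf exercises via sparql.type = "sparqls". A minimal sketch of using the class programmatically follows; it relies only on the constructor, setter and getter present in the commit, and the demo class itself is illustrative.

import java.net.URL;

import org.dllearner.kb.sparql.SparqlSimpleExtractor;

public class SparqlSimpleExtractorDemo {

    public static void main(String[] args) throws Exception {
        // Point the extractor at the endpoint used in AristotlePos.conf.
        SparqlSimpleExtractor extractor = new SparqlSimpleExtractor();
        extractor.setEndpointURL(new URL("http://live.dbpedia.org/sparql"));

        // In revision 3609 init() does nothing yet; fragment extraction is still to come.
        extractor.init();

        System.out.println("Endpoint: " + extractor.getEndpointURL());
    }
}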
From: <lor...@us...> - 2012-03-26 12:19:30
|
Revision: 3619 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3619&view=rev Author: lorenz_b Date: 2012-03-26 12:19:20 +0000 (Mon, 26 Mar 2012) Log Message: ----------- Some changed needed to be consistent with parent pom and updated libs. Modified Paths: -------------- trunk/interfaces/pom.xml trunk/interfaces/src/main/assemble/archive.xml trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java trunk/scripts/src/main/java/org/dllearner/examples/pdb/PDBIdRdfModel.java trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLEndpointMetrics.java trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java Added Paths: ----------- trunk/scripts/src/main/java/org/dllearner/scripts/DescriptionSorter.java trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxExplanationRenderer.java trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxObjectRenderer.java trunk/scripts/src/main/java/org/dllearner/scripts/OntologyCleaner.java Removed Paths: ------------- trunk/scripts/src/main/resources/dbpedia_0.75.owl trunk/scripts/src/main/resources/dbpedia_0.75_no_datapropaxioms.owl Modified: trunk/interfaces/pom.xml =================================================================== --- trunk/interfaces/pom.xml 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/interfaces/pom.xml 2012-03-26 12:19:20 UTC (rev 3619) @@ -15,7 +15,7 @@ </parent> <properties> - <release.name>1.0-beta-1</release.name> + <release.name>1.0-beta-2</release.name> </properties> <profiles> @@ -312,6 +312,7 @@ </dependency> <!--END Logging Dependencies--> + </dependencies> Modified: trunk/interfaces/src/main/assemble/archive.xml =================================================================== --- trunk/interfaces/src/main/assemble/archive.xml 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/interfaces/src/main/assemble/archive.xml 2012-03-26 12:19:20 UTC (rev 3619) @@ -1,6 +1,7 @@ <assembly> <id>archive</id> <formats> + <format>zip</format> <format>tar.gz</format> <format>zip</format> </formats> @@ -33,6 +34,11 @@ <fileSet> <directory>../examples</directory> <outputDirectory>examples</outputDirectory> + <excludes> + <exclude> + **/qtl/ + </exclude> + </excludes> </fileSet> <fileSet> <directory>doc</directory> Modified: trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/interfaces/src/main/java/org/dllearner/cli/CLI.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -25,6 +25,7 @@ import java.io.PrintStream; import java.util.ArrayList; import java.util.List; +import java.util.Map.Entry; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Layout; @@ -123,9 +124,12 @@ AbstractCELA la = context.getBean(AbstractCELA.class); new CrossValidation(la,lp,rs,nrOfFolds,false); } else { - knowledgeSource = context.getBeansOfType(KnowledgeSource.class).entrySet().iterator().next().getValue(); - algorithm = context.getBean(LearningAlgorithm.class); - algorithm.start(); +// knowledgeSource = context.getBeansOfType(Knowledge1Source.class).entrySet().iterator().next().getValue(); + for(Entry<String, LearningAlgorithm> entry : context.getBeansOfType(LearningAlgorithm.class).entrySet()){ + algorithm = entry.getValue(); + logger.info("Running algorithm instance \"" + entry.getKey() + "\"(" + 
algorithm.getClass().getSimpleName() + ")"); + algorithm.start(); + } } } Modified: trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/interfaces/src/main/java/org/dllearner/cli/Enrichment.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -352,7 +352,7 @@ System.out.println(resource + " appears to be a class. Running appropriate algorithms.\n"); try { runClassLearningAlgorithms(ks, (NamedClass) resource); - } catch (Exception e) { + } catch (Exception e) {e.printStackTrace(); System.out.println(e.getCause()); } catch (Error e) { System.out.println(e.getCause()); Modified: trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java =================================================================== --- trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -81,7 +81,7 @@ // setting for SPARQL based tests (0 = no special treatment, 1 = test only SPARQL // examples, 2 = skip SPARQL tests) - int sparql = 0; + int sparql = 2; // we use a logger, which outputs few messages (warnings, errors) SimpleLayout layout = new SimpleLayout(); Modified: trunk/scripts/src/main/java/org/dllearner/examples/pdb/PDBIdRdfModel.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/pdb/PDBIdRdfModel.java 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/scripts/src/main/java/org/dllearner/examples/pdb/PDBIdRdfModel.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -18,6 +18,7 @@ import com.hp.hpl.jena.query.QueryExecution; import com.hp.hpl.jena.query.QueryExecutionFactory; import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.rdf.model.NodeIterator; import com.hp.hpl.jena.rdf.model.Property; import com.hp.hpl.jena.rdf.model.RDFNode; @@ -307,9 +308,9 @@ while ( residuePosition.hasNext() ) { RDFNode positionNode = residuePosition.next(); positionNodes.add(positionNode); - NodeIterator positionLabelNodes = _pdbIdModel.listObjectsOfProperty( positionNode.asResource(), hasValue ); + NodeIterator positionLabelNodes = _pdbIdModel.listObjectsOfProperty( positionNode.as(Resource.class), hasValue ); while ( positionLabelNodes.hasNext() ) { - positionLabels.add(positionLabelNodes.next().asLiteral().getInt()); + positionLabels.add(positionLabelNodes.next().as(Literal.class).getInt()); } } Added: trunk/scripts/src/main/java/org/dllearner/scripts/DescriptionSorter.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/DescriptionSorter.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/DescriptionSorter.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -0,0 +1,60 @@ +// Copyright (c) 2006 - 2008, Clark & Parsia, LLC. <http://www.clarkparsia.com> +// This source code is available under the terms of the Affero General Public License v3. +// +// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions. +// Questions, comments, or requests for clarification: lic...@cl... 
+ +package org.dllearner.scripts; + +import java.util.Collection; +import java.util.Comparator; +import java.util.Set; +import java.util.TreeSet; + +import org.semanticweb.owlapi.model.OWLNamedObject; +import org.semanticweb.owlapi.model.OWLObject; + +/** + * <p>Title: </p> + * + * <p>Description: </p> + * + * <p>Copyright: Copyright (c) 2008</p> + * + * <p>Company: Clark & Parsia, LLC. <http://www.clarkparsia.com></p> + * + * @author Evren Sirin + */ +public class DescriptionSorter { + + /** + * Sorts a set of OWLObjects alphabetically based on toString values. + * Named objects always come before unnamed objects. + * + * @param set the set to sort + * @return the sorted version of the set + */ + public static <N extends OWLObject> Set<N> toSortedSet(Collection<N> set) { + Set<N> sorted = new TreeSet<N>(new Comparator<N>() { + public int compare(N o1, N o2) { + boolean named1 = o1 instanceof OWLNamedObject; + boolean named2 = o2 instanceof OWLNamedObject; + int cmp; + if (named1 && !named2) + cmp = -1; + else if (!named1 && named2) + cmp = 1; + else + cmp = o1.toString().compareTo(o2.toString()); + + if( cmp == 0 && named1 && named2) + cmp = ((OWLNamedObject) o1).getIRI().compareTo( ((OWLNamedObject) o2).getIRI() ); + + return cmp; + } + }); + sorted.addAll(set); + return sorted; + } + +} Added: trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxExplanationRenderer.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxExplanationRenderer.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxExplanationRenderer.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -0,0 +1,181 @@ +// Copyright (c) 2006 - 2008, Clark & Parsia, LLC. <http://www.clarkparsia.com> +// This source code is available under the terms of the Affero General Public License v3. +// +// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions. +// Questions, comments, or requests for clarification: lic...@cl... + +package org.dllearner.scripts; + +import java.io.IOException; +import java.io.Writer; +import java.util.Set; + +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLException; +import org.semanticweb.owlapi.reasoner.OWLReasoner; + +import com.clarkparsia.owlapi.explanation.io.ExplanationRenderer; +import com.clarkparsia.owlapi.explanation.io.manchester.BlockWriter; +import com.clarkparsia.owlapi.explanation.io.manchester.TextBlockWriter; + +/** + * An explanation renderer implementation that prints the axioms in the + * explanation using Manchester syntax. + * + * @author Evren Sirin + */ +public class ManchesterSyntaxExplanationRenderer implements ExplanationRenderer { + protected ManchesterSyntaxObjectRenderer renderer; + + protected BlockWriter writer; + + protected OWLAxiom currentAxiom; + + private boolean wrapLines = true; + + private boolean smartIndent = true; + + private int index; + + private OWLReasoner reasoner; + + public ManchesterSyntaxExplanationRenderer(OWLReasoner reasoner) { + this.reasoner = reasoner; + } + + /** + * {@inheritDoc} + */ + public void endRendering() { + writer.flush(); + } + + /** + * Returns the current axioms being whose explanation is being rendered or + * <code>null</code> if no axiom has been provided. 
+ * + * @return the current axioms being whose explanation is being rendered or + * <code>null</code> if no axiom has been provided + */ + protected OWLAxiom getCurrentAxiom() { + return currentAxiom; + } + + /** + * Returns the current smart indent value. + * + * @return the current smart indent value + */ + public boolean isSmartIndent() { + return smartIndent; + } + + /** + * Returns the current line wrapping value. + * + * @return the current line wrapping value + */ + public boolean isWrapLines() { + return wrapLines; + } + + /** + * Render an explanation without the axiom header. This function is not guaranteed + * to be supported by the subclasses since an explanation renderer may rely on the + * axiom being explained to reorder the axioms or find irrelevant bits. + * + * @param explanations Set of explanations we are rendering + * @throws OWLException + * @throws IOException + * @throws UnsupportedOperationException + */ + public void render(Set<Set<OWLAxiom>> explanations) throws OWLException, + IOException, UnsupportedOperationException { + render( (OWLAxiom) null, explanations ); + } + + /** + * {@inheritDoc} + */ + public void render(OWLAxiom axiom, Set<Set<OWLAxiom>> explanations) throws OWLException, + IOException { + setCurrentAxiom( axiom ); + + if (index == 1) { + if (axiom != null) { + writer.print("Axiom: "); + axiom.accept(renderer); + writer.println(); + writer.println(); + } + if (explanations.isEmpty()) { + writer.println( "Explanation: AXIOM IS NOT ENTAILED!" ); + return; + } + writer.println("Explanation(s): "); + } + + String header = index++ + ")"; + writer.print(header); + renderSingleExplanation(explanations.iterator().next()); + writer.println(); + } + + protected void renderSingleExplanation(Set<OWLAxiom> explanation) throws OWLException, + IOException { + writer.printSpace(); + writer.printSpace(); + writer.printSpace(); + + writer.startBlock(); + + for( OWLAxiom a : explanation ) { + a.accept( renderer ); + if(reasoner.isEntailed(a)){ + writer.append("(ENTAILED)"); + } + writer.println(); + } + + writer.endBlock(); + writer.println(); + } + + protected void setCurrentAxiom(OWLAxiom currentAxiom) { + this.currentAxiom = currentAxiom; + } + + /** + * Sets the smart indent option which will align the elements of + * intersections and unions in columns when line wrapping is turned on. + * + * @param smartIndent + * the smart indent value + * @see #setWrapLines(boolean) + */ + public void setSmartIndent(boolean smartIndent) { + this.smartIndent = smartIndent; + } + + /** + * Sets the line wrapping option which will print the elements of + * intersections and unions into multiple lines. + * + * @param wrapLines + * the line wrapping value + */ + public void setWrapLines(boolean wrapLines) { + this.wrapLines = wrapLines; + } + + /** + * {@inheritDoc} + */ + public void startRendering(Writer w) { + writer = new TextBlockWriter( w ); + renderer = new ManchesterSyntaxObjectRenderer( this.writer ); + renderer.setWrapLines( isWrapLines() ); + renderer.setSmartIndent( isSmartIndent() ); + index = 1; + } +} Added: trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxObjectRenderer.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxObjectRenderer.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/ManchesterSyntaxObjectRenderer.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -0,0 +1,867 @@ +// Copyright (c) 2006 - 2008, Clark & Parsia, LLC. 
<http://www.clarkparsia.com> +// This source code is available under the terms of the Affero General Public License v3. +// +// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions. +// Questions, comments, or requests for clarification: lic...@cl... + +package org.dllearner.scripts; + +import java.util.Collection; +import java.util.Iterator; +import java.util.Set; + +import org.semanticweb.owlapi.model.IRI; +import org.semanticweb.owlapi.model.OWLAnnotation; +import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationProperty; +import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLAnonymousIndividual; +import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLCardinalityRestriction; +import org.semanticweb.owlapi.model.OWLClass; +import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; +import org.semanticweb.owlapi.model.OWLDataComplementOf; +import org.semanticweb.owlapi.model.OWLDataExactCardinality; +import org.semanticweb.owlapi.model.OWLDataHasValue; +import org.semanticweb.owlapi.model.OWLDataIntersectionOf; +import org.semanticweb.owlapi.model.OWLDataMaxCardinality; +import org.semanticweb.owlapi.model.OWLDataMinCardinality; +import org.semanticweb.owlapi.model.OWLDataOneOf; +import org.semanticweb.owlapi.model.OWLDataProperty; +import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLDataUnionOf; +import org.semanticweb.owlapi.model.OWLDatatype; +import org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom; +import org.semanticweb.owlapi.model.OWLDatatypeRestriction; +import org.semanticweb.owlapi.model.OWLDeclarationAxiom; +import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; +import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLFacetRestriction; +import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; +import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLHasKeyAxiom; +import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; +import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLLiteral; +import org.semanticweb.owlapi.model.OWLNamedIndividual; +import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObject; +import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; +import 
org.semanticweb.owlapi.model.OWLObjectComplementOf; +import org.semanticweb.owlapi.model.OWLObjectExactCardinality; +import org.semanticweb.owlapi.model.OWLObjectHasSelf; +import org.semanticweb.owlapi.model.OWLObjectHasValue; +import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; +import org.semanticweb.owlapi.model.OWLObjectInverseOf; +import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; +import org.semanticweb.owlapi.model.OWLObjectMinCardinality; +import org.semanticweb.owlapi.model.OWLObjectOneOf; +import org.semanticweb.owlapi.model.OWLObjectProperty; +import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; +import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; +import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; +import org.semanticweb.owlapi.model.OWLObjectUnionOf; +import org.semanticweb.owlapi.model.OWLObjectVisitor; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLPropertyExpression; +import org.semanticweb.owlapi.model.OWLQuantifiedRestriction; +import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; +import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; +import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; +import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; +import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; +import org.semanticweb.owlapi.model.OWLUnaryPropertyAxiom; +import org.semanticweb.owlapi.model.SWRLArgument; +import org.semanticweb.owlapi.model.SWRLAtom; +import org.semanticweb.owlapi.model.SWRLBuiltInAtom; +import org.semanticweb.owlapi.model.SWRLClassAtom; +import org.semanticweb.owlapi.model.SWRLDataPropertyAtom; +import org.semanticweb.owlapi.model.SWRLDataRangeAtom; +import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom; +import org.semanticweb.owlapi.model.SWRLIndividualArgument; +import org.semanticweb.owlapi.model.SWRLLiteralArgument; +import org.semanticweb.owlapi.model.SWRLObjectPropertyAtom; +import org.semanticweb.owlapi.model.SWRLRule; +import org.semanticweb.owlapi.model.SWRLSameIndividualAtom; +import org.semanticweb.owlapi.model.SWRLVariable; +import org.semanticweb.owlapi.vocab.XSDVocabulary; + +import com.clarkparsia.owlapi.explanation.io.manchester.BlockWriter; +import com.clarkparsia.owlapi.explanation.io.manchester.Keyword; + +public class ManchesterSyntaxObjectRenderer implements OWLObjectVisitor { + private boolean wrapLines = true; + private boolean smartIndent = true; + + protected BlockWriter writer; + + /** + * @param writer + */ + public ManchesterSyntaxObjectRenderer(BlockWriter writer) { + this.writer = writer; + } + + public boolean isSmartIndent() { + return smartIndent; + } + + public boolean isWrapLines() { + return wrapLines; + } + + public void setSmartIndent(boolean smartIndent) { + this.smartIndent = smartIndent; + } + + public void setWrapLines(boolean wrapLines) { + this.wrapLines = wrapLines; + } + + /** + * Return the short form (local name) for a URI identifier + * + * @param theIRI + * the URI + * @return the local name part of the URI identifier + */ + protected String shortForm(IRI theIRI) { + String fragment = theIRI.getFragment(); + if( 
fragment != null ) { + return fragment; + } + String str = theIRI.toString(); + int lastSlashIndex = str.lastIndexOf( '/' ); + if( lastSlashIndex != -1 ) { + return str.substring( lastSlashIndex + 1, str.length() ); + } + return str; + } + + public void visit(OWLAsymmetricObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.ASYMMETRIC_PROPERTY ); + } + + public void visit(OWLClass theOWLClass) { + write( theOWLClass.getIRI() ); + } + + public void visit(OWLClassAssertionAxiom theAxiom) { + writeKeywordInfix( Keyword.TYPE, theAxiom.getIndividual(), theAxiom.getClassExpression() ); + } + + public void visit(OWLAnnotation theAnnotation) { + write( Keyword.ANNOTATION ); + writeSpace(); + write( "(" ); + write( theAnnotation.getProperty() ); + write( " " ); + write( theAnnotation.getValue() ); + write( ")" ); + } + + public void visit(OWLDataAllValuesFrom theDescription) { + writeQuantifiedRestriction( theDescription, Keyword.ONLY ); + } + + public void visit(OWLDataComplementOf theDescription) { + writeKeywordPrefix( Keyword.NOT, theDescription.getDataRange() ); + } + + public void visit(OWLDataExactCardinality theDescription) { + writeCardinalityRestriction( theDescription, Keyword.EXACTLY ); + } + + public void visit(OWLDataMaxCardinality theDescription) { + writeCardinalityRestriction( theDescription, Keyword.MAX ); + } + + public void visit(OWLDataMinCardinality theDescription) { + writeCardinalityRestriction( theDescription, Keyword.MIN ); + } + + public void visit(OWLDataOneOf theDescription) { + writeEnumeration( theDescription.getValues() ); + } + + public void visit(OWLDataProperty theProperty) { + write( theProperty.getIRI() ); + } + + public void visit(OWLDataPropertyAssertionAxiom theAxiom) { + write( theAxiom.getSubject() ); + writeSpace(); + write( theAxiom.getProperty() ); + writeSpace(); + write( theAxiom.getObject() ); + } + + public void visit(OWLDataPropertyDomainAxiom theAxiom) { + writeKeywordInfix( Keyword.DOMAIN, theAxiom.getProperty(), theAxiom.getDomain() ); + } + + public void visit(OWLDataPropertyRangeAxiom theAxiom) { + writeKeywordInfix( Keyword.RANGE, theAxiom.getProperty(), theAxiom.getRange() ); + } + + public void visit(OWLFacetRestriction theRestriction) { + write( theRestriction.getFacet().getSymbolicForm() ); + writeSpace(); + write( theRestriction.getFacetValue() ); + } + + public void visit(OWLDatatypeRestriction theRestriction) { + + write( theRestriction.getDatatype() ); + write( "[" ); + boolean first = true; + for( OWLFacetRestriction restriction : theRestriction.getFacetRestrictions() ) { + if( first ) { + first = false; + } + else { + write( "," ); + writeSpace(); + } + write( restriction ); + } + write( "]" ); + + } + + public void visit(OWLDataSomeValuesFrom theDescription) { + writeQuantifiedRestriction( theDescription, Keyword.SOME ); + } + + public void visit(OWLSubDataPropertyOfAxiom theAxiom) { + writeKeywordInfix( Keyword.SUB_PROPERTY_OF, theAxiom.getSubProperty(), theAxiom + .getSuperProperty() ); + } + + public void visit(OWLDatatype node) { + write( node.getIRI() ); + } + + public void visit(OWLDataHasValue theDescription) { + writeRestriction( theDescription.getProperty(), Keyword.VALUE, theDescription.getValue() ); + } + + public void visit(OWLDeclarationAxiom theAxiom) { + writeKeywordPrefix( Keyword.DECLARATION, theAxiom.getEntity() ); + } + + public void visit(OWLDifferentIndividualsAxiom theAxiom) { + writeNaryAxiom( theAxiom.getIndividuals(), Keyword.DIFFERENT_INDIVIDUAL, + Keyword.DIFFERENT_INDIVIDUALS ); + 
} + + public void visit(OWLDisjointClassesAxiom theAxiom) { + writeNaryAxiom( theAxiom.getClassExpressions(), Keyword.DISJOINT_CLASS, + Keyword.DISJOINT_CLASSES ); + } + + public void visit(OWLDisjointDataPropertiesAxiom theAxiom) { + writeNaryAxiom( theAxiom.getProperties(), Keyword.DISJOINT_PROPERTY, + Keyword.DISJOINT_PROPERTIES ); + } + + public void visit(OWLDisjointObjectPropertiesAxiom theAxiom) { + writeNaryAxiom( theAxiom.getProperties(), Keyword.DISJOINT_PROPERTY, + Keyword.DISJOINT_PROPERTIES ); + } + + public void visit(OWLDisjointUnionAxiom theAxiom) { + write( theAxiom.getOWLClass() ); + writeSpace(); + write( Keyword.DISJOINT_UNION ); + writeSpace(); + writeNaryKeyword( Keyword.OR, theAxiom.getClassExpressions() ); + } + + public void visit(OWLEquivalentClassesAxiom theAxiom) { + writeNaryAxiom( theAxiom.getClassExpressions(), Keyword.EQUIVALENT_TO, + Keyword.EQUIVALENT_CLASSES ); + } + + public void visit(OWLEquivalentDataPropertiesAxiom theAxiom) { + writeNaryAxiom( theAxiom.getProperties(), Keyword.EQUIVALENT_TO, + Keyword.EQUIVALENT_PROPERTIES ); + } + + public void visit(OWLEquivalentObjectPropertiesAxiom theAxiom) { + writeNaryAxiom( theAxiom.getProperties(), Keyword.EQUIVALENT_TO, + Keyword.EQUIVALENT_PROPERTIES ); + } + + public void visit(OWLFunctionalDataPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.FUNCTIONAL ); + } + + public void visit(OWLFunctionalObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.FUNCTIONAL ); + } + + public void visit(OWLAnonymousIndividual theIndividual) { + write( theIndividual.getID().getID() ); + } + + public void visit(OWLNamedIndividual theIndividual) { +// write( shortForm(theIndividual.getIRI()).toUpperCase() ); + write(theIndividual.getIRI()); + } + + public void visit(OWLInverseFunctionalObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.INVERSE_FUNCTIONAL ); + } + + public void visit(OWLInverseObjectPropertiesAxiom theAxiom) { + writeKeywordInfix( Keyword.INVERSE_OF, theAxiom.getFirstProperty(), theAxiom + .getSecondProperty() ); + } + + public void visit(OWLIrreflexiveObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.IRREFLEXIVE ); + } + + public void visit(OWLNegativeDataPropertyAssertionAxiom theAxiom) { + write( Keyword.NOT_RELATIONSHIP ); + writeSpace(); + write( "(" ); + write( theAxiom.getSubject() ); + writeSpace(); + write( theAxiom.getProperty() ); + writeSpace(); + write( theAxiom.getObject() ); + write( ")" ); + } + + public void visit(OWLNegativeObjectPropertyAssertionAxiom theAxiom) { + write( Keyword.NOT_RELATIONSHIP ); + writeSpace(); + write( "(" ); + write( theAxiom.getSubject() ); + writeSpace(); + write( theAxiom.getProperty() ); + writeSpace(); + write( theAxiom.getObject() ); + write( ")" ); + } + + public void visit(OWLObjectAllValuesFrom theDescription) { + writeQuantifiedRestriction( theDescription, Keyword.ONLY ); + } + + public void visit(OWLObjectComplementOf theDescription) { + writeKeywordPrefix( Keyword.NOT, theDescription.getOperand() ); + } + + public void visit(OWLObjectExactCardinality theDescription) { + writeCardinalityRestriction( theDescription, Keyword.EXACTLY ); + } + + public void visit(OWLObjectIntersectionOf theDescription) { + writeNaryKeyword( Keyword.AND, theDescription.getOperands() ); + } + + public void visit(OWLObjectMaxCardinality theDescription) { + writeCardinalityRestriction( theDescription, Keyword.MAX ); + } + + public void visit(OWLObjectMinCardinality theDescription) { + 
writeCardinalityRestriction( theDescription, Keyword.MIN ); + } + + public void visit(OWLObjectOneOf theDescription) { + writeEnumeration( theDescription.getIndividuals() ); + } + + public void visit(OWLObjectProperty theProperty) { + write( theProperty.getIRI() ); + } + + public void visit(OWLObjectPropertyAssertionAxiom theAxiom) { + write( theAxiom.getSubject() ); + writeSpace(); + write( theAxiom.getProperty() ); + writeSpace(); + write( theAxiom.getObject() ); + } + + public void visit(OWLSubPropertyChainOfAxiom theAxiom) { + writeCollection( theAxiom.getPropertyChain(), " o", false ); + writeSpace(); + write( Keyword.SUB_PROPERTY_OF ); + writeSpace(); + write( theAxiom.getSuperProperty() ); + } + + public void visit(OWLObjectPropertyDomainAxiom theAxiom) { + writeKeywordInfix( Keyword.DOMAIN, theAxiom.getProperty(), theAxiom.getDomain() ); + } + + public void visit(OWLObjectInverseOf theInverse) { + writeKeywordPrefix( Keyword.INVERSE, theInverse.getInverse() ); + } + + public void visit(OWLObjectPropertyRangeAxiom theAxiom) { + writeKeywordInfix( Keyword.RANGE, theAxiom.getProperty(), theAxiom.getRange() ); + } + + public void visit(OWLObjectHasSelf theRestriction) { + writeRestriction( theRestriction.getProperty(), Keyword.SELF ); + } + + public void visit(OWLObjectSomeValuesFrom theDescription) { + writeQuantifiedRestriction( theDescription, Keyword.SOME ); + } + + public void visit(OWLSubObjectPropertyOfAxiom theAxiom) { + writeKeywordInfix( Keyword.SUB_PROPERTY_OF, theAxiom.getSubProperty(), theAxiom + .getSuperProperty() ); + } + + public void visit(OWLObjectUnionOf theDescription) { + writeNaryKeyword( Keyword.OR, theDescription.getOperands() ); + } + + public void visit(OWLObjectHasValue theDescription) { + writeRestriction( theDescription.getProperty(), Keyword.VALUE, theDescription.getValue() ); + } + + public void visit(OWLOntology ontology) { + write( ontology.getOntologyID().getOntologyIRI() ); + } + + public void visit(OWLReflexiveObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.REFLEXIVE_PROPERTY ); + } + + public void visit(OWLSameIndividualAxiom theAxiom) { + writeNaryAxiom( theAxiom.getIndividuals(), Keyword.SAME_INDIVIDUAL, + Keyword.SAME_INDIVIDUALS ); + } + + public void visit(OWLSubClassOfAxiom theAxiom) { + writeKeywordInfix( Keyword.SUB_CLASS_OF, theAxiom.getSubClass(), theAxiom.getSuperClass() ); + } + + public void visit(OWLSymmetricObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.SYMMETRIC ); + } + + public void visit(OWLTransitiveObjectPropertyAxiom theAxiom) { + writeUnaryPropertyAxiom( theAxiom, Keyword.TRANSITIVE ); + } + + public void visit(OWLLiteral node) { + if( node.isRDFPlainLiteral() ) { + write( "\"" ); + write( node.getLiteral() ); + write( "\"" ); + if( node.getLang() != null && !node.getLang().equals("")) { + write( "@" ); + write( node.getLang() ); + } + } + else if( node.getDatatype().getIRI().equals( XSDVocabulary.INTEGER.getIRI() ) + || node.getDatatype().getIRI().equals( XSDVocabulary.DECIMAL.getIRI() ) ) { + write( node.getLiteral() ); + } + else if( node.getDatatype().getIRI().equals( XSDVocabulary.FLOAT.getIRI() ) ) { + write( node.getLiteral() ); + write( "f" ); + } + else { + write( "\"" ); + write( node.getLiteral() ); + write( "\"" ); + write( "^^" ); + write( node.getDatatype() ); + } + } + + public void visit(SWRLLiteralArgument node) { + write( node.getLiteral() ); + } + + public void visit(SWRLIndividualArgument node) { + write( node.getIndividual() ); + } + + public void 
visit(SWRLVariable node) { + write( "?" ); + write( node.getIRI() ); + } + + public void visit(SWRLBuiltInAtom node) { + write( node.getPredicate() ); + write( "(" ); + for( SWRLArgument arg : node.getArguments() ) { + write( arg ); + write( " " ); + } + write( ")" ); + } + + public void visit(SWRLClassAtom node) { + write( node.getPredicate() ); + write( "(" ); + write( node.getArgument() ); + write( ")" ); + } + + /* + * this is all the SWRL rendering stuff that we'll provide some defaults for + * using evren's concise format stuff + */ + + public void visit(SWRLDataRangeAtom node) { + write( node.getPredicate() ); + write( "(" ); + write( node.getArgument() ); + write( ")" ); + } + + public void visit(SWRLDataPropertyAtom node) { + write( node.getPredicate() ); + write( "(" ); + write( node.getFirstArgument() ); + write( ", " ); + write( node.getSecondArgument() ); + write( ")" ); + } + + public void visit(SWRLDifferentIndividualsAtom node) { + write( "differentFrom" ); + write( "(" ); + write( node.getFirstArgument() ); + write( ", " ); + write( node.getSecondArgument() ); + write( ")" ); + } + + public void visit(SWRLObjectPropertyAtom node) { + write( node.getPredicate() ); + write( "(" ); + write( node.getFirstArgument() ); + write( ", " ); + write( node.getSecondArgument() ); + write( ")" ); + } + + /** + * @inheritDoc + */ + public void visit(SWRLRule rule) { + write( "Rule" ); + write( "(" ); + +// if( !rule.isAnonymous() ) { +// write( rule.getIRI() ); +// writeSpace(); +// } + + boolean first = true; + for( SWRLAtom at : rule.getBody() ) { + if( first ) + first = false; + else + write( ", " ); + write( at ); + + } + write( " -> " ); + + first = true; + for( SWRLAtom at : rule.getHead() ) { + if( first ) + first = false; + else + write( ", " ); + write( at ); + } + + write( ")" ); + } + + public void visit(SWRLSameIndividualAtom node) { + write( "sameAs" ); + write( "(" ); + write( node.getFirstArgument() ); + write( ", " ); + write( node.getSecondArgument() ); + write( ")" ); + } + + protected void writeNaryKeyword(Keyword theKeyword, Set<? extends OWLObject> theObjects) { + + theObjects = DescriptionSorter.toSortedSet( theObjects ); + + Iterator<? extends OWLObject> aIter = theObjects.iterator(); + + // write( "(" ); + + if( smartIndent ) + writer.startBlock(); + + write( aIter.next() ); + while( aIter.hasNext() ) { + if( wrapLines ) { + writeNewLine(); + } + else { + writeSpace(); + } + + if( theKeyword != null ) { + write( theKeyword ); + writeSpace(); + } + + write( aIter.next() ); + } + + if( smartIndent ) + writer.endBlock(); + + // write( ")" ); + } + + protected void writeCardinalityRestriction(OWLCardinalityRestriction<?, ?, ?> theRestriction, + Keyword theKeyword) { + if( theRestriction.isQualified() ) + writeRestriction( theRestriction.getProperty(), theKeyword, theRestriction + .getCardinality(), theRestriction.getFiller() ); + else + writeRestriction( theRestriction.getProperty(), theKeyword, theRestriction + .getCardinality() ); + } + + /** + * Render an n-ary axiom with special handling for the binary case. + * + * @param set + * objects to be rendered + * @param binary + * keyword used for binary case + * @param nary + * keyword used for n-ary case + */ + protected void writeNaryAxiom(Set<? extends OWLObject> set, Keyword binary, Keyword nary) { + Set<? extends OWLObject> sortedSet = DescriptionSorter.toSortedSet( set ); + Iterator<? 
extends OWLObject> aIter = sortedSet.iterator(); + + if( set.size() == 2 ) { + writeKeywordInfix( binary, aIter.next(), aIter.next() ); + } + else { + write( nary ); + write( "(" ); + + writeNaryKeyword( null, set ); + + write( ")" ); + } + } + + protected void writeQuantifiedRestriction(OWLQuantifiedRestriction<?, ?, ?> theRestriction, + Keyword theKeyword) { + writeRestriction( theRestriction.getProperty(), theKeyword, theRestriction.getFiller() ); + } + + protected void writeRestriction(OWLPropertyExpression<?, ?> theProperty, Keyword theKeyword, + Object... theArgs) { + write( "(" ); + + write( theProperty ); + writeSpace(); + write( theKeyword ); + for( Object aObject : theArgs ) { + writeSpace(); + if( aObject instanceof OWLObject ) { + write( (OWLObject) aObject ); + } + else { + write( aObject.toString() ); + } + } + + write( ")" ); + + } + + protected void writeUnaryPropertyAxiom(OWLUnaryPropertyAxiom<?> theAxiom, Keyword keyword) { + writeKeywordPrefix( keyword, theAxiom.getProperty() ); + } + + protected void writeKeywordPrefix(Keyword keyword, OWLObject arg) { + write( keyword ); + writeSpace(); +// write( "(" ); + write( arg ); +// write( ")" ); + } + + protected void writeKeywordPrefix(Keyword keyword, OWLObject arg1, OWLObject arg2) { + write( keyword ); + writeSpace(); + write( "(" ); + write( arg1 ); + writeSpace(); + write( arg2 ); + write( ")" ); + } + + protected void writeKeywordInfix(Keyword keyword, OWLObject arg1, OWLObject arg2) { + write( arg1 ); + writeSpace(); + write( keyword ); + writeSpace(); + write( arg2 ); + } + + protected void writeEnumeration(Set<? extends OWLObject> objects) { + write( Keyword.OPEN_BRACE ); + writeCollection( objects, ",", true ); + write( Keyword.CLOSE_BRACE ); + } + + protected void writeCollection(Collection<? 
extends OWLObject> objects, String separator, boolean sort) { + if( sort ) + objects = DescriptionSorter.toSortedSet( objects ); + boolean first = true; + for( OWLObject ind : objects ) { + if( first ) { + first = false; + } + else { + write( separator ); + writeSpace(); + } + write( ind ); + } + } + + protected void write(OWLObject object) { + object.accept( this ); + } + + protected void write(Keyword keyword) { + write( keyword.getLabel().toUpperCase() ); + } + + protected void write(String s) { + writer.print( s ); + } + + protected void write(IRI iri) { + write( shortForm( iri ) ); + } + + protected void writeNewLine() { + writer.println(); + } + + protected void writeSpace() { + write( " " ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLHasKeyAxiom theAxiom) { + write( theAxiom.getClassExpression() ); + writeSpace(); + write( Keyword.HAS_KEY ); + if( !theAxiom.getObjectPropertyExpressions().isEmpty() ) { + writeCollection( theAxiom.getObjectPropertyExpressions(), "", true ); + writeSpace(); + } + writeCollection( theAxiom.getDataPropertyExpressions(), "", true ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLDatatypeDefinitionAxiom theAxiom) { + writeKeywordInfix( Keyword.EQUIVALENT_TO, theAxiom.getDatatype(), theAxiom.getDataRange() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLAnnotationAssertionAxiom theAxiom) { + write( theAxiom.getSubject() ); + writeSpace(); + write( theAxiom.getProperty() ); + writeSpace(); + write( theAxiom.getValue() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLSubAnnotationPropertyOfAxiom theAxiom) { + writeKeywordInfix( Keyword.SUB_PROPERTY_OF, theAxiom.getSubProperty(), theAxiom.getSuperProperty() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLAnnotationPropertyDomainAxiom theAxiom) { + writeKeywordInfix( Keyword.DOMAIN, theAxiom.getProperty(), theAxiom.getDomain() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLAnnotationPropertyRangeAxiom theAxiom) { + writeKeywordInfix( Keyword.RANGE, theAxiom.getProperty(), theAxiom.getRange() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLDataIntersectionOf node) { + writeNaryKeyword( Keyword.AND, node.getOperands() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLDataUnionOf node) { + writeNaryKeyword( Keyword.OR, node.getOperands() ); + } + + /** + * {@inheritDoc} + */ + public void visit(OWLAnnotationProperty property) { + write( property.getIRI() ); + } + + /** + * {@inheritDoc} + */ + public void visit(IRI iri) { + write( iri ); + } +} Added: trunk/scripts/src/main/java/org/dllearner/scripts/OntologyCleaner.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/OntologyCleaner.java (rev 0) +++ trunk/scripts/src/main/java/org/dllearner/scripts/OntologyCleaner.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -0,0 +1,43 @@ +package org.dllearner.scripts; + +import java.io.File; +import java.io.FileOutputStream; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.io.RDFXMLOntologyFormat; +import org.semanticweb.owlapi.model.OWLAxiom; +import org.semanticweb.owlapi.model.OWLEntity; +import org.semanticweb.owlapi.model.OWLOntology; + +public class OntologyCleaner { + + /** + * @param args + */ + public static void main(String[] args) throws Exception{ + List<String> namespaces = Arrays.asList(new 
String[]{"http://schema.org/"}); + OWLOntology ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument( + OntologyCleaner.class.getClassLoader().getResourceAsStream("dbpedia_0.75.owl")); + + //get all entities starting with one of the declared namespaces + Set<OWLAxiom> axioms2Remove = new HashSet<OWLAxiom>(); + for(OWLAxiom axiom : ontology.getLogicalAxioms()){ + for(OWLEntity entity : axiom.getSignature()){ + for(String namespace : namespaces){ + if(entity.toStringID().startsWith(namespace)){ + axioms2Remove.add(axiom); + } + } + + } + } + OWLManager.createOWLOntologyManager().removeAxioms(ontology, axioms2Remove); + OWLManager.createOWLOntologyManager().saveOntology(ontology, new RDFXMLOntologyFormat(), new FileOutputStream(new File("src/main/resources/dbpedia_0.75_cleaned.owl"))); + + } + +} Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLEndpointMetrics.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLEndpointMetrics.java 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLEndpointMetrics.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -1,9 +1,12 @@ package org.dllearner.scripts; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; import java.net.URL; import java.util.Map; +import java.util.Map.Entry; import java.util.TreeMap; -import java.util.Map.Entry; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; @@ -18,16 +21,19 @@ import com.hp.hpl.jena.query.QuerySolution; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; public class SPARQLEndpointMetrics { //parameters for thread pool //Parallel running Threads(Executor) on System - private static int corePoolSize = 1; + private static int corePoolSize = 5; //Maximum Threads allowed in Pool private static int maximumPoolSize = 20; //Keep alive time for waiting threads for jobs(Runnable) private static long keepAliveTime = 10; + + private static int queryTimeout = 30; /** * @param args @@ -84,9 +90,13 @@ ArrayBlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<Runnable>(endpoints.size()); ThreadPoolExecutor threadPool = new ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, TimeUnit.SECONDS, workQueue); + final BufferedWriter output = new BufferedWriter(new FileWriter("log/endpointMetrics.html", true)); + final StringBuffer sb = new StringBuffer(); - sb.append("<table border=\"1\">"); - sb.append("<tr><th>#classes</th><th>#op</th><th>#dp</th><th>#individuals</th></tr>"); + sb.append("<table border=\"1\">\n"); + sb.append("<tr><th>endpoint</th><th>#classes</th><th>#op</th><th>#dp</th><th>URL</th><th>ERROR</th></tr>\n"); + output.append(sb.toString()); + output.flush(); // perform enrichment on endpoints for(final Entry<String,SparqlEndpoint> endpoint : endpoints.entrySet()) { @@ -96,30 +106,70 @@ @Override public void run() { SparqlEndpoint se = endpoint.getValue(); + String name = endpoint.getKey(); // System.out.println(se); String filter = "FILTER()"; try { //count classes - String query = "SELECT (COUNT(DISTINCT ?type) AS ?cnt) WHERE {?s a ?type.}"; - int classCnt = new SparqlQuery(query, se).send().next().getLiteral("cnt").getInt(); +// String query = "SELECT (COUNT(DISTINCT ?type) AS ?cnt) WHERE {?s a ?type.}"; +// int classCnt = 
executeQuery(query, se, 20).next().getLiteral("cnt").getInt(); +// +// //count object properties +// query = "SELECT (COUNT(DISTINCT ?p) AS ?cnt) WHERE {?s ?p ?o.}"; +// int opCnt = executeQuery(query, se, 20).next().getLiteral("cnt").getInt(); +// +// //count data properties +// query = "SELECT (COUNT(DISTINCT ?p) AS ?cnt) WHERE {?s ?p ?o.}"; +// int dpCnt = executeQuery(query, se, 20).next().getLiteral("cnt").getInt(); +// +// //count individuals +// query = "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a ?type.}"; +// int indCnt = executeQuery(query, se, 20).next().getLiteral("cnt").getInt(); + String query = "SELECT DISTINCT ?type WHERE {?s a ?type. ?type a <http://www.w3.org/2002/07/owl#Class>}"; + int classCnt = countEntities(query, se); + //count object properties - query = "SELECT (COUNT(DISTINCT ?p) AS ?cnt) WHERE {?s ?p ?o.}"; - int opCnt = new SparqlQuery(query, se).send().next().getLiteral("cnt").getInt(); + query = "SELECT DISTINCT ?p WHERE {?s ?p ?o. ?p a <http://www.w3.org/2002/07/owl#ObjectProperty>}"; + int opCnt = countEntities(query, se); //count data properties - query = "SELECT (COUNT(DISTINCT ?p) AS ?cnt) WHERE {?s ?p ?o.}"; - int dpCnt = new SparqlQuery(query, se).send().next().getLiteral("cnt").getInt(); + query = "SELECT DISTINCT ?p WHERE {?s ?p ?o. ?p a <http://www.w3.org/2002/07/owl#DatatypeProperty>}"; + int dpCnt = countEntities(query, se); //count individuals - query = "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a ?type.}"; - int indCnt = new SparqlQuery(query, se).send().next().getLiteral("cnt").getInt(); +// query = "SELECT (COUNT(DISTINCT ?s) AS ?cnt) WHERE {?s a ?type.}"; +// int indCnt = executeQuery(query, se, 20).next().getLiteral("cnt").getInt(); - sb.append("<tr><td>" + classCnt + "</td><td>" + opCnt + "</td>" + "</td><td>" + dpCnt + "</td><td>" + indCnt + "</td></tr>\n"); + String line = "<tr><td>" + name + "</td><td>" + + classCnt + "</td><td>" + + opCnt + "</td><td>" + + dpCnt + "</td><td>" + + se.getURL() + "</td><td>" + + "" + "</td></tr>\n"; + + sb.append(line); + output.append(line); + output.flush(); + System.out.println(sb); } catch (Exception e) { - sb.append(""); + Throwable t = e.getCause(); + String errorCode; + if(t == null){ + errorCode = e.getClass().getSimpleName(); + } else { + errorCode = t.getMessage(); + } + String line = "<tr><td>" + name + "</td><td>-1</td><td>-1</td><td>-1</td><td>" + se.getURL() + "</td><td>" + errorCode + "</tr>\n"; + sb.append(line); + try { + output.append(line); + output.flush(); + } catch (IOException e1) { + e1.printStackTrace(); + } } } @@ -127,7 +177,39 @@ } threadPool.shutdown(); System.out.println(sb); - + } + + private static int countEntities(String baseQuery, SparqlEndpoint endpoint) throws Exception{ + int cnt = 0; + int limit = 1000; + int offset = 0; + ResultSet rs; + int tmp = 0; + do{ + String query = baseQuery + " LIMIT " + limit + " OFFSET " + offset;System.out.println(endpoint.getURL() + ": " + query); + rs = executeQuery(query, endpoint); + tmp = 0; + while(rs.hasNext()){ + rs.next(); + tmp++; + } + cnt += tmp; + offset += limit; + } while (tmp >= limit); + + return cnt; + } + + private static ResultSet executeQuery(String queryString, SparqlEndpoint endpoint) throws Exception{ + try { + QueryEngineHTTP qe = new QueryEngineHTTP(endpoint.getURL().toString(), queryString); + qe.setDefaultGraphURIs(endpoint.getDefaultGraphURIs()); + qe.setTimeout(queryTimeout * 1000); + return qe.execSelect(); + } catch (Exception e) { + throw e; + } + } } Modified: 
trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java 2012-03-26 12:10:46 UTC (rev 3618) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SPARQLSampleDebugging.java 2012-03-26 12:19:20 UTC (rev 3619) @@ -19,6 +19,7 @@ import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Types; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -26,6 +27,7 @@ import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; +import java.util.Random; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -78,7 +80,6 @@ import uk.ac.manchester.cs.owl.owlapi.OWLDataFactoryImpl; import com.clarkparsia.owlapi.explanation.PelletExplanation; -import com.clarkparsia.owlapi.explanation.io.manchester.ManchesterSyntaxExplanationRenderer; import com.clarkparsia.pellet.owlapiv3.PelletReasoner; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; import com.hp.hpl.jena.query.Query; @@ -121,6 +122,8 @@ private OWLReasoner dbpediaReasoner; private OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + private Random randomGen = new Random(2222343); + static {PelletExplanation.setup();} public SPARQLSampleDebugging(SparqlEndpoint endpoint) { @@ -160,6 +163,23 @@ } } + private Set<OWLAxiom> getBlackList(){ + Set<OWLAxiom> blacklist = new HashSet<OWLAxiom>(); + OWLAxiom ax = factory.getOWLSubClassOfAxiom( + factory.getOWLObjectSomeValuesFrom(factory.getOWLObjectProperty(IRI.create("http://dbpedia.org/ontology/leaderName")), factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Person"))), + factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Settlement"))); + blacklist.add(ax); + ax = factory.getOWLSubClassOfAxiom( + factory.getOWLObjectSomeValuesFrom(factory.getOWLObjectProperty(IRI.create("http://dbpedia.org/ontology/language")), factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Language"))), + factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Work"))); + blacklist.add(ax); + ax = factory.getOWLSubClassOfAxiom( + factory.getOWLObjectSomeValuesFrom(factory.getOWLObjectProperty(IRI.create("http://dbpedia.org/ontology/officialLanguage")), factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Language"))), + factory.getOWLClass(IRI.create("http://dbpedia.org/ontology/Country"))); + blacklist.add(ax); + return blacklist; + } + private void writeToDB(String resource, int fragementSize, boolean consistent, Set<Set<OWLAxiom>> explanations) { try { ps.setString(1, resource); @@ -225,25 +245,19 @@ return resources; } - private Set<String> extractSampleResourcesSingle(int size){ - logger.info("Extracting " + sampleSize + " sample resources..."); + private String extractSampleResource(int maxOffset){ long startTime = System.currentTimeMillis(); - Set<String> resources = new HashSet<String>(); - String query = "SELECT COUNT(DISTINCT ?s) WHERE {?s a ?type}"; + int random = randomGen.nextInt(maxOffset); + logger.info("Extracting sample resource (" + random + ")..."); + + String query = String.format("SELECT DISTINCT ?s WHERE {?s a ?type} LIMIT 1 OFFSET %d", random); ResultSet rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query)); - int max = rs.next().getLiteral(rs.getResultVars().get(0)).getInt(); + String resource = 
rs.next().getResource("s").getURI(); - for(int i = 0; i < size; i++){ - int random = (int)(Math.random() * max); - query = String.format("SELECT DISTINCT ?s WHERE {?s a ?type} LIMIT 1 OFFSET %d", random); - rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query)); - resources.add(rs.next().getResource("s").getURI()); - } - logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); - return resources; + return resource; } private OWLOntology extractSampleModule(Set<String> resources){ @@ -280,17 +294,12 @@ return explanations; } - private Set<Set<OWLAxiom>> computeExplanations(PelletReasoner reasoner){ + private Set<Set<OWLAxiom>> computeExplanations(PelletReasoner reasoner) throws Exception{ logger.info("Computing explanations..."); long startTime = System.currentTimeMillis(); PelletExplanation expGen = new PelletExplanation(reasoner); Set<Set<OWLAxiom>> explanations = new HashSet<Set<OWLAxiom>>(maxNrOfExplanations); - try { - explanations = expGen.getInconsistencyExplanations(maxNrOfExplanations); - } catch (Exception e) { - e.printStackTrace(); - logger.error(e); - } + explanations = expGen.getInconsistencyExplanations(maxNrOfExplanations); logger.info("...done in " + (System.currentTimeMillis()-startTime) + "ms."); return explanations; } @@ -387,7 +396,7 @@ } }); OWLDataFactory f = OWLManager.createOWLOntologyManager().getOWLDataFactory(); - ManchesterSyntaxExplanationRenderer renderer = new ManchesterSyntaxExplanationRenderer(); + ManchesterSyntaxExplanationRenderer renderer = new ManchesterSyntaxExplanationRenderer(dbpediaReasoner); PrintWriter out = new PrintWriter( System.out ); for(OWLObjectProperty p : reasoner.getRootOntology().getObjectPropertiesInSignature()){ boolean satisfiable = reasoner.isSatisfiable(f.getOWLObjectExactCardinality(1, p)); @@ -427,9 +436,12 @@ } - public void run(OWLOntology reference) throws OWLOntologyCreationException{ - Set<OWLOntology> ontologies = new HashSet<OWLOntology>(); - ontologies.add(reference); + + public void computeSampleExplanations(OWLOntology reference, int nrOfExplanations) throws OWLOntologyCreationException, IOException{ + Set<Set<OWLAxiom>> sampleExplanations = new HashSet<Set<OWLAxiom>>(); + manager = reference.getOWLOntologyManager(); + manager.removeAxioms(reference, getBlackList()); + PelletReasoner reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(reference); reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY); Set<OWLClass> unsatisfiableClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); @@ -438,25 +450,70 @@ logger.info("Unsatisfiable object properties(" + unsatisfiableObjectProperties.size() + "): " + unsatisfiableObjectProperties); Set<OWLDataProperty> unsatisfiableDataProperties = getUnsatisfiableDataProperties(reasoner); logger.info("Unsatisfiable data properties(" + unsatisfiableDataProperties.size() + "): " + unsatisfiableDataProperties); - OWLOntology merged; OWLOntology module; + reasoner.isConsistent(); + OWLOntologyManager man = OWLManager.createOWLOntologyManager(); + man.addOntologyChangeListener( reasoner ); + Model model; - Set<String> resources = extractSampleResourcesChunked(sampleSize); - for(String resource : resources){resource = "http://dbpedia.org/resource/Leipzig"; - logger.info("Resource " + resource); - module = extractSampleModule(Collections.singleton(resource)); - ontologies.add(module); - merged = OWLManager.createOWLOntologyManager().createOntology(IRI.create("http://merged.en"), ontologies); - reasoner = 
PelletReasonerFactory.getInstance().createNonBufferingReasoner(merged); + FileWriter out = new FileWriter( "log/alljustifications" + System.currentTimeMillis() + ".txt" ); + ManchesterSyntaxExplanationRenderer renderer = new ManchesterSyntaxExplanationRenderer(dbpediaReasoner); + renderer.startRendering(out ); + + String query = "SELECT COUNT(DISTINCT ?s) WHERE {?s a ?type}"; + ResultSet rs = SparqlQuery.convertJSONtoResultSet(cache.executeSelectQuery(endpoint, query)); + int maxOffset = rs.next().getLiteral(rs.getResultVars().get(0)).getInt(); + + while(sampleExplanations.size() < nrOfExplanations){ + String resource = extractSampleResource(maxOffset);//resource = "http://dbpedia.org/resource/Pigeon_%28company%29"; + logger.info("###################################################################"); + logger.info("Resource " + resource);//resource = "http://dbpedia.org/resource/The_Man_Who_Wouldn%27t_Die"; + module = extractSampleModule(resource);module.getOWLOntologyManager().removeAxioms(module, module.getAxioms(AxiomType.DATA_PROPERTY_ASSERTION)); + manager.addAxioms(reference, module.getABoxAxioms(true)); + manager.removeAxioms(reference, reference.getAxioms(AxiomType.DATA_PROPERTY_ASSERTION)); boolean isConsistent = reasoner.isConsistent(); logger.info("Consistent: " + isConsistent); Set<Set<OWLAxiom>> explanations = null; if(!isConsistent){ - explanations = computeExplanations(reasoner); + explanations = new HashSet<Set<OWLAxiom>>(); + try { + explanations.addAll(computeExplanations(reasoner)); + } catch (Exception e1) { + continue; + } + model = convert(reference); + explanations.addAll(computeInconsistencyExplanationsByAsymmetryPattern(reference, model)); + explanations.addAll(computeInconsistencyExplanationsByIrreflexivityPattern(reference, model)); + explanations.addAll(computeInconsistencyExplanationsByFunctionalityPattern(reference, model)); + explanations.addAll(computeInconsistencyExplanationsByInverseFunctionalityPattern(reference, model)); logger.info("Found " + explanations.size() + " explanations."); + for(Set<OWLAxiom> exp : explanations){ + logger.info(exp + "\n"); + out.flush(); + try { + renderer.render( Collections.singleton(exp) ); + } catch (UnsupportedOperationException e) { + e.printStackTrace(); + } catch (OWLException e) { + e.printStackTrace(); + } + } + boolean addSample = true; + while(addSample){ + int rnd = 0; + if(explanations.size() > 1){ + rnd = new Random().nextInt(explanations.size()-1); + } + Set<OWLAxiom> sampleExplanation = new ArrayList<Set<OWLAxiom>>(explanations).get(rnd); + if(!containsUnsatisfiableObjectProperty(sampleExplanation)){ + sampleExplanations.add(sampleExplanation); + addSample = false; + } + + } + Map<AxiomType, Integer> axiomType2CountMap = new HashMap<AxiomType, Integer>(); for(Set<OWLAxiom> explanation : explanations){ - logger.info(explanation); for(OWLAxiom axiom : explanation){ Integer cnt = axiomType2CountMap.get(axiom.getAxiomType()); if(cnt == null){ @@ -466,19 +523,35 @@ axiomType2CountMap.put(axiom.getAxiomType(), cnt); } } + logger.info("Axiom type count:"); for(Entry<AxiomType, Integer> entry : axiomType2CountMap.entrySet()){ logger.info(entry.getKey() + "\t: " + entry.getValue()); } } - ontologies.remove(module); - reasoner.dispose(); + man.removeAxioms(reference, module.getABoxAxioms(true)); // writeToDB(resource, module.getLogicalAxiomCount(), isConsistent, explanations); - break; } + renderer.endRendering(); + FileWriter sampleOut = new FileWriter( "log/sample_justifications" + System.currentTimeMillis() + ".txt" 
); + ManchesterSyntaxExplanationRenderer sampleRenderer = new ManchesterSynta... [truncated message content] |
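For reference, a minimal sketch of how the explanation renderer added in this revision can be driven. Only the OWLReasoner-taking constructor, the "(ENTAILED)" flagging, and the startRendering/render/endRendering lifecycle are taken from the diff above; the ontology file name, the Pellet reasoner setup and the sketch class itself are illustrative assumptions, not part of the revision (the revision itself passes a separate schema-only DBpedia reasoner to the constructor).

// Minimal sketch (not part of the revision): drives the ManchesterSyntaxExplanationRenderer
// copied into org.dllearner.scripts above. File name and Pellet setup are assumptions.
package org.dllearner.scripts;

import java.io.File;
import java.io.PrintWriter;
import java.util.Set;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLOntology;

import com.clarkparsia.owlapi.explanation.PelletExplanation;
import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;

public class ExplanationRenderingSketch {

    public static void main(String[] args) throws Exception {
        // required Pellet explanation bootstrap, as in SPARQLSampleDebugging
        PelletExplanation.setup();

        // load any (possibly inconsistent) ontology; the file name is illustrative
        OWLOntology ontology = OWLManager.createOWLOntologyManager()
                .loadOntologyFromOntologyDocument(new File("example.owl"));
        PelletReasoner reasoner = PelletReasonerFactory.getInstance()
                .createNonBufferingReasoner(ontology);

        // compute up to 10 justifications for the inconsistency
        PelletExplanation expGen = new PelletExplanation(reasoner);
        Set<Set<OWLAxiom>> explanations = expGen.getInconsistencyExplanations(10);

        // render the justifications; axioms entailed by the reasoner passed to the
        // constructor are flagged with "(ENTAILED)" by renderSingleExplanation
        // (the revision above passes a separate schema-only reasoner here)
        ManchesterSyntaxExplanationRenderer renderer =
                new ManchesterSyntaxExplanationRenderer(reasoner);
        PrintWriter out = new PrintWriter(System.out);
        renderer.startRendering(out);
        renderer.render(explanations); // overload without the axiom header
        renderer.endRendering();
        out.flush();
    }
}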
From: <jen...@us...> - 2012-03-27 15:09:29
|
Revision: 3623 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3623&view=rev Author: jenslehmann Date: 2012-03-27 15:09:17 +0000 (Tue, 27 Mar 2012) Log Message: ----------- several changes to be able to use the KnowledgeSource interface instead of being forced to use AbstractKnowledgeSource Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java trunk/components-core/src/main/java/org/dllearner/utilities/examples/ExampleDataCollector.java trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyCloser.java trunk/components-core/src/test/java/org/dllearner/test/ComponentTest.java trunk/components-core/src/test/java/org/dllearner/test/junit/ELDownTests.java trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java trunk/components-core/src/test/java/org/dllearner/test/junit/TestOntologies.java trunk/interfaces/src/main/java/org/dllearner/gui/Config.java trunk/scripts/src/main/java/org/dllearner/examples/KRKModular.java trunk/scripts/src/main/java/org/dllearner/scripts/CloseOntology.java trunk/scripts/src/main/java/org/dllearner/scripts/DumbLPFinder.java trunk/scripts/src/main/java/org/dllearner/scripts/NewSample.java trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java trunk/scripts/src/main/java/org/dllearner/scripts/WikipediaCategoryCleaner.java trunk/scripts/src/main/java/org/dllearner/scripts/tiger/TestIterativeLearning.java trunk/test/newcomponent/AristotlePosNeg.conf Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractReasonerComponent.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -120,7 +120,7 @@ /** * The underlying knowledge sources. */ - protected Set<AbstractKnowledgeSource> sources; + protected Set<KnowledgeSource> sources; public AbstractReasonerComponent(){ @@ -132,7 +132,7 @@ * @param sources * The underlying knowledge sources. */ - public AbstractReasonerComponent(Set<AbstractKnowledgeSource> sources) { + public AbstractReasonerComponent(Set<KnowledgeSource> sources) { this.sources = sources; } @@ -141,16 +141,16 @@ * * @return The underlying knowledge sources. 
*/ - public Set<AbstractKnowledgeSource> getSources() { + public Set<KnowledgeSource> getSources() { return sources; } - public void setSources(Set<AbstractKnowledgeSource> sources){ + public void setSources(Set<KnowledgeSource> sources){ this.sources = sources; } - public void setSources(AbstractKnowledgeSource... sources) { - this.sources = new HashSet<AbstractKnowledgeSource>(Arrays.asList(sources)); + public void setSources(KnowledgeSource... sources) { + this.sources = new HashSet<KnowledgeSource>(Arrays.asList(sources)); } /** @@ -161,7 +161,7 @@ * @param sources * The new knowledge sources. */ - public void changeSources(Set<AbstractKnowledgeSource> sources) { + public void changeSources(Set<KnowledgeSource> sources) { this.sources = sources; } Modified: trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/core/AnnComponentManager.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -79,7 +79,7 @@ "org.dllearner.kb.OWLFile", "org.dllearner.kb.SparqlEndpointKS", "org.dllearner.kb.sparql.SparqlKnowledgeSource", - "org.dllearner.kb.sparql.SparqlSimpleExtractor", + "org.dllearner.kb.sparql.simple.SparqlSimpleExtractor", "org.dllearner.learningproblems.PosNegLPStandard", "org.dllearner.learningproblems.FuzzyPosNegLPStandard", "org.dllearner.learningproblems.PosOnlyLP", Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -1,6 +1,6 @@ package org.dllearner.kb.sparql.simple; -import org.nlp2rdf.ontology.ClassIndexer; +//import org.nlp2rdf.ontology.ClassIndexer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -9,22 +9,23 @@ public class SchemaIndexer { private static Logger log = LoggerFactory.getLogger(SchemaIndexer.class); - private ClassIndexer classIndexer=null; +// private ClassIndexer classIndexer=null; public SchemaIndexer(){} public void init(){ - classIndexer=new ClassIndexer(); +// classIndexer=new ClassIndexer(); OntModel model = ModelFactory.createOntologyModel(); model.read(SchemaIndexer.class.getResourceAsStream("dbpedia_3-3.6.owl"), null); - classIndexer.index(model); +// classIndexer.index(model); } public OntModel getHierarchyForURI(String classUri){ - if(classIndexer==null){ - this.init(); - } - return classIndexer.getHierarchyForClassURI(classUri); +// if(classIndexer==null){ +// this.init(); +// } +// return classIndexer.getHierarchyForClassURI(classUri); + return null; } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -27,8 +27,9 @@ private List<String> filters = null; @ConfigOption(name = "recursionDepth", description = "recursion depth", required = true) 
private int recursionDepth = 0; - @ConfigOption(name = "defaultGraphURI", description = "default graph URI", required = true) - private String defaultGraphURIs=null; + + @ConfigOption(name = "defaultGraphURI", description = "default graph URI", required = true) + private String defaultGraphURI=null; private OWLOntology owlOntology; private static Logger log = LoggerFactory.getLogger(SparqlSimpleExtractor.class); @@ -69,15 +70,15 @@ for (int i = 0; i < recursionDepth - 1; i++) { queryString=aGenerator.createQuery(instances, model, filters); log.info("SPARQL: {}", queryString); - executor.executeQuery(queryString, endpointURL, model,defaultGraphURIs); + executor.executeQuery(queryString, endpointURL, model,defaultGraphURI); } queryString = aGenerator.createLastQuery(instances, model, filters); log.info("SPARQL: {}", queryString); - executor.executeQuery(queryString, endpointURL, model, defaultGraphURIs); + executor.executeQuery(queryString, endpointURL, model, defaultGraphURI); TBoxQueryGenerator tGenerator = new TBoxQueryGenerator(); queryString = tGenerator.createQuery(model, filters, instances); - executor.executeQuery(queryString, endpointURL, model,defaultGraphURIs); + executor.executeQuery(queryString, endpointURL, model,defaultGraphURI); JenaToOwlapiConverter converter = new JenaToOwlapiConverter(); owlOntology=converter.convert(this.model); } @@ -90,6 +91,14 @@ this.endpointURL = endpointURL; } + public String getDefaultGraphURI() { + return defaultGraphURI; + } + + public void setDefaultGraphURI(String defaultGraphURI) { + this.defaultGraphURI = defaultGraphURI; + } + public Model getModel() { return model; } @@ -141,21 +150,8 @@ this.recursionDepth = recursionDepth; } - /** - * @return the defaultGraphURI - */ - public String getDefaultGraphURIs() { - return defaultGraphURIs; - } /** - * @param defaultGraphURI the defaultGraphURI to set - */ - public void setDefaultGraphURIs(String defaultGraphURI) { - this.defaultGraphURIs = defaultGraphURI; - } - - /** * @return */ public OWLOntology getOWLOntology() { Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/DIGReasoner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -40,6 +40,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.OntologyFormat; import org.dllearner.core.options.BooleanConfigOption; import org.dllearner.core.options.ConfigEntry; @@ -106,7 +107,7 @@ - public DIGReasoner(Set<AbstractKnowledgeSource> sources) { + public DIGReasoner(Set<KnowledgeSource> sources) { super(sources); try { reasonerURL = new URL("http://localhost:8081"); @@ -148,8 +149,8 @@ // "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " + // "xsi:schemaLocation=\"http://dl.kr.org/dig/2003/02/lang\n" + // "http://dl-web.man.ac.uk/dig/2003/02/dig.xsd\" uri=\""+kbURI+"\">"); - for (AbstractKnowledgeSource source : sources) { - sb.append(source.toDIG(kbURI)); + for (KnowledgeSource source : sources) { + sb.append(((AbstractKnowledgeSource)source).toDIG(kbURI)); ResponseDocument rd = null; try { Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 
=================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastInstanceChecker.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -37,6 +37,7 @@ import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; import org.dllearner.core.ComponentManager; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.ReasoningMethodUnsupportedException; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.owl.Axiom; @@ -150,8 +151,8 @@ Map<DatatypeProperty, Map<Individual, SortedSet<Integer>>> id, Map<DatatypeProperty, TreeSet<Individual>> bdPos, Map<DatatypeProperty, TreeSet<Individual>> bdNeg, - AbstractKnowledgeSource... sources) { - super(new HashSet<AbstractKnowledgeSource>(Arrays.asList(sources))); + KnowledgeSource... sources) { + super(new HashSet<KnowledgeSource>(Arrays.asList(sources))); this.individuals = individuals; this.classInstancesPos = classInstancesPos; this.opPos = opPos; @@ -160,7 +161,7 @@ this.bdNeg = bdNeg; if(rc == null){ - rc = new OWLAPIReasoner(new HashSet<AbstractKnowledgeSource>(Arrays.asList(sources))); + rc = new OWLAPIReasoner(new HashSet<KnowledgeSource>(Arrays.asList(sources))); try { rc.init(); } catch (ComponentInitException e) { @@ -202,12 +203,12 @@ } } - public FastInstanceChecker(Set<AbstractKnowledgeSource> sources) { + public FastInstanceChecker(Set<KnowledgeSource> sources) { super(sources); } - public FastInstanceChecker(AbstractKnowledgeSource... sources) { - super(new HashSet<AbstractKnowledgeSource>(Arrays.asList(sources))); + public FastInstanceChecker(KnowledgeSource... 
sources) { + super(new HashSet<KnowledgeSource>(Arrays.asList(sources))); } /** @@ -270,9 +271,10 @@ logger.debug("dematerialising object properties"); for (ObjectProperty atomicRole : atomicRoles) { +// System.out.println(atomicRole + " " + rc.getPropertyMembers(atomicRole)); opPos.put(atomicRole, rc.getPropertyMembers(atomicRole)); } - + logger.debug("dematerialising datatype properties"); for (DatatypeProperty dp : booleanDatatypeProperties) { Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/FastRetrievalReasoner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -28,6 +28,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.ReasoningMethodUnsupportedException; import org.dllearner.core.options.ConfigEntry; import org.dllearner.core.options.InvalidConfigOptionValueException; @@ -56,7 +57,7 @@ AbstractReasonerComponent rc; - public FastRetrievalReasoner(Set<AbstractKnowledgeSource> sources) { + public FastRetrievalReasoner(Set<KnowledgeSource> sources) { super(sources); rc = new OWLAPIReasoner(sources); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/OWLAPIReasoner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -179,7 +179,7 @@ } - public OWLAPIReasoner(Set<AbstractKnowledgeSource> sources) { + public OWLAPIReasoner(Set<KnowledgeSource> sources) { super(sources); } Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/PelletReasoner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -42,6 +42,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.ReasoningMethodUnsupportedException; import org.dllearner.core.options.BooleanConfigOption; import org.dllearner.core.options.ConfigOption; @@ -177,7 +178,7 @@ private List<OWLOntology> owlAPIOntologies = new LinkedList<OWLOntology>(); private boolean defaultNegation = true; - public PelletReasoner(Set<AbstractKnowledgeSource> sources) { + public PelletReasoner(Set<KnowledgeSource> sources) { super(sources); } @@ -198,7 +199,7 @@ Set<OWLOntology> allImports = new HashSet<OWLOntology>(); prefixes = new TreeMap<String, String>(); - for (AbstractKnowledgeSource source : sources) { + for (KnowledgeSource source : sources) { if (source instanceof OWLOntologyKnowledgeSource) { ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); @@ -274,7 +275,7 @@ // all other sources are converted to KB and then to an // OWL API ontology } else { - 
KB kb = source.toKB(); + KB kb = ((AbstractKnowledgeSource)source).toKB(); // System.out.println(kb.toString(null,null)); IRI ontologyIRI = IRI.create("http://example.com"); @@ -460,7 +461,7 @@ Set<OWLOntology> allImports = new HashSet<OWLOntology>(); prefixes = new TreeMap<String, String>(); - for (AbstractKnowledgeSource source : sources) { + for (KnowledgeSource source : sources) { if (source instanceof OWLOntologyKnowledgeSource) { ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); @@ -529,7 +530,7 @@ // all other sources are converted to KB and then to an // OWL API ontology } else { - KB kb = source.toKB(); + KB kb = ((AbstractKnowledgeSource)source).toKB(); // System.out.println(kb.toString(null,null)); IRI ontologyIRI = IRI.create("http://example.com"); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/ProtegeReasoner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -37,6 +37,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.ReasoningMethodUnsupportedException; import org.dllearner.core.options.BooleanConfigOption; import org.dllearner.core.options.ConfigOption; @@ -163,11 +164,11 @@ private List<OWLOntology> owlAPIOntologies = new LinkedList<OWLOntology>(); private boolean defaultNegation = true; - public ProtegeReasoner(Set<AbstractKnowledgeSource> sources) { + public ProtegeReasoner(Set<KnowledgeSource> sources) { super(sources); } - public ProtegeReasoner(Set<AbstractKnowledgeSource> sources, OWLReasoner reasoner) { + public ProtegeReasoner(Set<KnowledgeSource> sources, OWLReasoner reasoner) { this(sources); this.reasoner = reasoner; } @@ -247,7 +248,7 @@ Set<OWLOntology> allImports = new HashSet<OWLOntology>(); prefixes = new TreeMap<String, String>(); - for (AbstractKnowledgeSource source : sources) { + for (KnowledgeSource source : sources) { if (source instanceof OWLOntologyKnowledgeSource) { ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); @@ -311,7 +312,7 @@ // all other sources are converted to KB and then to an // OWL API ontology } else { - KB kb = source.toKB(); + KB kb = ((AbstractKnowledgeSource)source).toKB(); // System.out.println(kb.toString(null,null)); IRI ontologyIRI = IRI.create("http://example.com"); Modified: trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/reasoning/fuzzydll/FuzzyOWLAPIReasoner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -42,6 +42,7 @@ import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentAnn; import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.options.ConfigEntry; import org.dllearner.core.options.ConfigOption; import org.dllearner.core.options.InvalidConfigOptionValueException; @@ -177,7 +178,7 @@ } - public 
FuzzyOWLAPIReasoner(Set<AbstractKnowledgeSource> sources) { + public FuzzyOWLAPIReasoner(Set<KnowledgeSource> sources) { super(sources); } @@ -244,7 +245,7 @@ Set<OWLOntology> allImports = new HashSet<OWLOntology>(); prefixes = new TreeMap<String,String>(); - for(AbstractKnowledgeSource source : sources) { + for(KnowledgeSource source : sources) { if (source instanceof OWLOntologyKnowledgeSource) { ontology = ((OWLOntologyKnowledgeSource) source).createOWLOntology(manager); @@ -278,7 +279,7 @@ // all other sources are converted to KB and then to an // OWL API ontology } else { - KB kb = source.toKB(); + KB kb = ((AbstractKnowledgeSource)source).toKB(); // System.out.println(kb.toString(null,null)); IRI ontologyURI = IRI.create("http://example.com"); Modified: trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/refinementoperators/RhoDRDown.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -309,7 +309,7 @@ } } frequentValues.put(op, frequentInds); - + } } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/examples/ExampleDataCollector.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/examples/ExampleDataCollector.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/utilities/examples/ExampleDataCollector.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -35,6 +35,7 @@ import org.dllearner.algorithms.ocel.OCEL; import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.ComponentManager; +import org.dllearner.core.KnowledgeSource; import org.dllearner.kb.OWLFile; import org.dllearner.learningproblems.PosNegLPStandard; import org.dllearner.reasoning.FastInstanceChecker; @@ -73,7 +74,7 @@ urls.addAll (convert(baseDir, pos)); urls.addAll (convert(baseDir, neg)); - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); try { URL add = new File(baseDir+"tiger.rdf").toURI().toURL(); // add = new File(baseDir+"new.rdf").toURI().toURL(); @@ -94,7 +95,7 @@ // la.getConfigurator().setUseAllConstructor(false); // la.getConfigurator().setUseExistsConstructor(false); la.setUseDataHasValueConstructor(true); - for(AbstractKnowledgeSource ks: tmp){ + for(KnowledgeSource ks: tmp){ ks.init(); } rc.init(); Modified: trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyCloser.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyCloser.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/main/java/org/dllearner/utilities/owl/OntologyCloser.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -27,9 +27,9 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; -import org.dllearner.core.AbstractKnowledgeSource; -import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.ClassAssertionAxiom; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.EquivalentClassesAxiom; @@ -64,7 +64,7 @@ super(); this.kb = 
kb; this.kbFile = new KBFile(this.kb); - Set<AbstractKnowledgeSource> ks = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> ks = new HashSet<KnowledgeSource>(); ks.add(this.kbFile); OWLAPIReasoner owlapi = new OWLAPIReasoner(); owlapi.setSources(ks); @@ -85,7 +85,7 @@ SimpleClock sc = new SimpleClock(); sc.printAndSet(); this.kbFile = new KBFile(this.kb); - Set<AbstractKnowledgeSource> ks = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> ks = new HashSet<KnowledgeSource>(); ks.add(this.kbFile); sc.printAndSet("updating reasoner"); Modified: trunk/components-core/src/test/java/org/dllearner/test/ComponentTest.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/ComponentTest.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/test/java/org/dllearner/test/ComponentTest.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -19,20 +19,16 @@ package org.dllearner.test; -import java.io.File; import java.net.MalformedURLException; import java.util.Collections; import java.util.Set; import java.util.TreeSet; import org.dllearner.algorithms.ocel.OCEL; -import org.dllearner.core.ComponentInitException; -import org.dllearner.core.ComponentManager; -import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractCELA; -import org.dllearner.core.AbstractLearningProblem; -import org.dllearner.core.LearningProblemUnsupportedException; import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.Individual; import org.dllearner.kb.OWLFile; import org.dllearner.learningproblems.PosNegLPStandard; @@ -55,7 +51,7 @@ // create knowledge source String example = "../examples/family/uncle.owl"; - AbstractKnowledgeSource source = new OWLFile(example); + KnowledgeSource source = new OWLFile(example); // create OWL API reasoning service with standard settings AbstractReasonerComponent reasoner = new OWLAPIReasoner(Collections.singleton(source)); Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/ELDownTests.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/ELDownTests.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/ELDownTests.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -35,10 +35,9 @@ import org.apache.log4j.SimpleLayout; import org.dllearner.algorithms.el.ELDescriptionNode; import org.dllearner.algorithms.el.ELDescriptionTree; -import org.dllearner.core.ComponentInitException; -import org.dllearner.core.ComponentManager; -import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.ComponentInitException; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.NamedClass; import org.dllearner.core.owl.ObjectProperty; @@ -298,7 +297,7 @@ logger.addAppender(app); String ont = "../test/galen2.owl"; - AbstractKnowledgeSource source = new OWLFile(ont); + KnowledgeSource source = new OWLFile(ont); source.init(); AbstractReasonerComponent reasoner = new OWLAPIReasoner(Collections.singleton(source)); reasoner.init(); @@ -333,7 +332,7 @@ public void asTest() throws ComponentInitException, MalformedURLException { String ont = "../test/galen2.owl"; - AbstractKnowledgeSource source = new 
OWLFile(ont); + KnowledgeSource source = new OWLFile(ont); source.init(); AbstractReasonerComponent reasoner = new OWLAPIReasoner(Collections.singleton(source)); reasoner.init(); Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/RefinementOperatorTests.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -21,8 +21,6 @@ import static org.junit.Assert.assertTrue; -import java.io.File; -import java.net.MalformedURLException; import java.util.Collections; import java.util.Set; import java.util.TreeSet; @@ -30,16 +28,15 @@ import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.dllearner.algorithms.ocel.OCEL; +import org.dllearner.core.AbstractLearningProblem; +import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; import org.dllearner.core.ComponentManager; -import org.dllearner.core.AbstractKnowledgeSource; -import org.dllearner.core.AbstractLearningProblem; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.LearningProblemUnsupportedException; -import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.owl.ClassHierarchy; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.NamedClass; -import org.dllearner.core.owl.Thing; import org.dllearner.kb.OWLFile; import org.dllearner.learningproblems.PosNegLPStandard; import org.dllearner.parser.KBParser; @@ -71,7 +68,7 @@ public void rhoDRDownTest() { try { String file = "../examples/carcinogenesis/carcinogenesis.owl"; - AbstractKnowledgeSource ks = new OWLFile(file); + KnowledgeSource ks = new OWLFile(file); AbstractReasonerComponent reasoner = new OWLAPIReasoner(Collections.singleton(ks)); reasoner.init(); baseURI = reasoner.getBaseURI(); Modified: trunk/components-core/src/test/java/org/dllearner/test/junit/TestOntologies.java =================================================================== --- trunk/components-core/src/test/java/org/dllearner/test/junit/TestOntologies.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/components-core/src/test/java/org/dllearner/test/junit/TestOntologies.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -19,14 +19,12 @@ package org.dllearner.test.junit; -import java.io.File; -import java.net.MalformedURLException; import java.util.Collections; +import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; import org.dllearner.core.ComponentManager; -import org.dllearner.core.AbstractKnowledgeSource; -import org.dllearner.core.AbstractReasonerComponent; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.KB; import org.dllearner.kb.KBFile; import org.dllearner.kb.OWLFile; @@ -141,7 +139,7 @@ try { ComponentManager cm = ComponentManager.getInstance(); - AbstractKnowledgeSource source; + KnowledgeSource source; // parse KB string if one has been specified if(!kbString.isEmpty() || ont.equals(TestOntology.EMPTY)) { Modified: trunk/interfaces/src/main/java/org/dllearner/gui/Config.java =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/gui/Config.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/interfaces/src/main/java/org/dllearner/gui/Config.java 2012-03-27 15:09:17 UTC 
(rev 3623) @@ -28,14 +28,15 @@ import org.apache.log4j.Logger; import org.dllearner.cli.Start; +import org.dllearner.core.AbstractCELA; import org.dllearner.core.AbstractComponent; +import org.dllearner.core.AbstractKnowledgeSource; +import org.dllearner.core.AbstractLearningProblem; +import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentInitException; import org.dllearner.core.ComponentManager; -import org.dllearner.core.AbstractKnowledgeSource; -import org.dllearner.core.AbstractCELA; -import org.dllearner.core.AbstractLearningProblem; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.LearningProblemUnsupportedException; -import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.options.ConfigEntry; import org.dllearner.core.options.ConfigOption; import org.dllearner.kb.KBFile; @@ -44,7 +45,6 @@ import org.dllearner.learningproblems.ClassLearningProblem; import org.dllearner.learningproblems.PosNegLP; import org.dllearner.learningproblems.PosOnlyLP; -import org.dllearner.parser.ParseException; /** * Config save all together used variables: ComponentManager, KnowledgeSource, @@ -215,7 +215,7 @@ */ public AbstractKnowledgeSource changeKnowledgeSource(Class<? extends AbstractKnowledgeSource> clazz) { source = cm.knowledgeSource(clazz); - Set<AbstractKnowledgeSource> sources = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> sources = new HashSet<KnowledgeSource>(); sources.add(source); reasoner.changeSources(sources); // logger.debug("knowledge source " + clazz + " changed"); Modified: trunk/scripts/src/main/java/org/dllearner/examples/KRKModular.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/examples/KRKModular.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/examples/KRKModular.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -20,6 +20,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentManager; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.ClassAssertionAxiom; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; @@ -249,7 +250,7 @@ ComponentManager cm = ComponentManager.getInstance(); AbstractCELA la = null; try { - Set<AbstractKnowledgeSource> sources = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> sources = new HashSet<KnowledgeSource>(); sources.add(new KBFile(kb)); FastInstanceChecker r = new FastInstanceChecker(); r.setSources(sources); @@ -485,7 +486,7 @@ public void initReasonerFact(){ KBFile kbFile = new KBFile(this.kb); - Set<AbstractKnowledgeSource> ks = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> ks = new HashSet<KnowledgeSource>(); ks.add(kbFile); reasoner = new OWLAPIReasoner(ks); @@ -500,7 +501,7 @@ public void initFIC(){ KBFile kbFile = new KBFile(this.kb); - Set<AbstractKnowledgeSource> ks = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> ks = new HashSet<KnowledgeSource>(); ks.add(kbFile); //System.out.println("blabla"); reasoner = new FastInstanceChecker(); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/CloseOntology.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/CloseOntology.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/scripts/CloseOntology.java 
2012-03-27 15:09:17 UTC (rev 3623) @@ -22,7 +22,7 @@ import java.util.HashSet; import java.util.Set; -import org.dllearner.core.AbstractKnowledgeSource; +import org.dllearner.core.KnowledgeSource; import org.dllearner.kb.OWLFile; import org.dllearner.reasoning.OWLAPIReasoner; import org.dllearner.utilities.owl.OntologyCloserOWLAPI; @@ -60,7 +60,7 @@ // initializing reasoner OWLFile owlFile = new OWLFile(); owlFile.setURL(inputURI.toURL()); - Set<AbstractKnowledgeSource> ks = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> ks = new HashSet<KnowledgeSource>(); ks.add(owlFile); OWLAPIReasoner owlapireasoner = new OWLAPIReasoner(); owlapireasoner.setSources(ks); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/DumbLPFinder.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/DumbLPFinder.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/scripts/DumbLPFinder.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -32,9 +32,9 @@ import org.apache.log4j.Logger; import org.apache.log4j.SimpleLayout; import org.dllearner.algorithms.ocel.OCEL; -import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentManager; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.Individual; import org.dllearner.kb.sparql.SparqlKnowledgeSource; import org.dllearner.learningproblems.EvaluatedDescriptionPosNeg; @@ -225,7 +225,7 @@ ks.setPredefinedEndpoint("LOCALJOSEKIBIBLE"); ks.setUseLits(true); - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); tmp.add(ks); // reasoner OWLAPIReasoner f = new OWLAPIReasoner(tmp); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/NewSample.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/NewSample.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/scripts/NewSample.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -34,10 +34,10 @@ import org.apache.log4j.Logger; import org.apache.log4j.SimpleLayout; import org.dllearner.algorithms.ocel.OCEL; -import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.ComponentInitException; import org.dllearner.core.ComponentManager; import org.dllearner.core.EvaluatedDescription; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.LearningProblemUnsupportedException; import org.dllearner.kb.OWLFile; import org.dllearner.learningproblems.EvaluatedDescriptionPosNeg; @@ -125,7 +125,7 @@ OWLFile ks = new OWLFile(); ks.setUrl(fileURL); - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); tmp.add(ks); // reasoner FastInstanceChecker f = new FastInstanceChecker(tmp); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/scripts/SemanticBibleComparison.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -46,6 +46,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.AbstractReasonerComponent; import org.dllearner.core.ComponentManager; 
+import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; import org.dllearner.gui.Config; @@ -466,7 +467,7 @@ ks.setRecursionDepth(3); } - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); tmp.add(ks); // reasoner OWLAPIReasoner f = new OWLAPIReasoner(tmp); @@ -505,7 +506,7 @@ } OWLFile ks = new OWLFile( fileURL); - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); tmp.add(ks); AbstractReasonerComponent f = null; Modified: trunk/scripts/src/main/java/org/dllearner/scripts/WikipediaCategoryCleaner.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/WikipediaCategoryCleaner.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/scripts/WikipediaCategoryCleaner.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -37,6 +37,7 @@ import org.dllearner.core.AbstractKnowledgeSource; import org.dllearner.core.ComponentManager; import org.dllearner.core.EvaluatedDescription; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.Individual; import org.dllearner.kb.extraction.ExtractionAlgorithm; import org.dllearner.kb.extraction.Manager; @@ -303,7 +304,7 @@ - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + Set<KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); tmp.add(ks); // reasoner FastInstanceChecker f = new FastInstanceChecker(tmp); Modified: trunk/scripts/src/main/java/org/dllearner/scripts/tiger/TestIterativeLearning.java =================================================================== --- trunk/scripts/src/main/java/org/dllearner/scripts/tiger/TestIterativeLearning.java 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/scripts/src/main/java/org/dllearner/scripts/tiger/TestIterativeLearning.java 2012-03-27 15:09:17 UTC (rev 3623) @@ -26,6 +26,7 @@ import org.dllearner.core.ComponentManager; import org.dllearner.core.ComponentPool; import org.dllearner.core.EvaluatedDescription; +import org.dllearner.core.KnowledgeSource; import org.dllearner.core.owl.NamedClass; import org.dllearner.kb.OWLFile; import org.dllearner.kb.sparql.Cache; @@ -444,8 +445,8 @@ } - private static Set<AbstractKnowledgeSource> _getOWL(Examples ex) throws Exception { - Set<AbstractKnowledgeSource> tmp = new HashSet<AbstractKnowledgeSource>(); + private static Set<KnowledgeSource> _getOWL(Examples ex) throws Exception { + Set <KnowledgeSource> tmp = new HashSet<KnowledgeSource>(); List<URL> urls = new ArrayList<URL>(); urls.add(new File(backgroundXML).toURI().toURL()); urls.addAll(ExampleDataCollector.convert(sentenceXMLFolder, ex.getPosTrain())); @@ -499,11 +500,11 @@ // } public static FastInstanceChecker _getFastInstanceChecker(Examples ex) throws Exception { - Set<AbstractKnowledgeSource> tmp = _getOWL(ex); + Set<KnowledgeSource> tmp = _getOWL(ex); // Set<KnowledgeSource> tmp = _getSPARQL(ex); FastInstanceChecker rc = new FastInstanceChecker(tmp); - for (AbstractKnowledgeSource ks : tmp) { + for (KnowledgeSource ks : tmp) { ks.init(); } rc.init(); Modified: trunk/test/newcomponent/AristotlePosNeg.conf =================================================================== --- trunk/test/newcomponent/AristotlePosNeg.conf 2012-03-27 12:45:35 UTC (rev 3622) +++ trunk/test/newcomponent/AristotlePosNeg.conf 2012-03-27 15:09:17 UTC (rev 3623) @@ 
-12,7 +12,7 @@ // SPARQL options sparql.type = "sparqls" sparql.endpointURL = "http://live.dbpedia.org/sparql" -sparql.defaultGraphURIs = {"http://dbpedia.org"} +sparql.defaultGraphURI = "http://dbpedia.org" sparql.recursionDepth = 1 sparql.instances = { @@ -47,3 +47,5 @@ // we use the OCEL algorithm alg.type = "ocel" alg.reasoner = reasoner +alg.learningProblem = lp + This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
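The refactoring in the revision above is mechanical (type references change from the AbstractKnowledgeSource base class to the KnowledgeSource interface), but the resulting client-side pattern is easiest to see in one place. The following is a minimal, self-contained sketch assembled from the constructor signatures and test diffs of this revision; it is not part of the commit, and the ontology path is the one used in ComponentTest, included purely for illustration.

import java.util.Collections;
import java.util.Set;

import org.dllearner.core.AbstractReasonerComponent;
import org.dllearner.core.ComponentInitException;
import org.dllearner.core.KnowledgeSource;
import org.dllearner.kb.OWLFile;
import org.dllearner.reasoning.OWLAPIReasoner;

public class KnowledgeSourceUsageSketch {

    public static void main(String[] args) throws ComponentInitException {
        // knowledge sources are now referenced through the KnowledgeSource interface
        KnowledgeSource source = new OWLFile("../examples/family/uncle.owl");
        source.init();

        // reasoner constructors now take Set<KnowledgeSource> instead of Set<AbstractKnowledgeSource>
        Set<KnowledgeSource> sources = Collections.singleton(source);
        AbstractReasonerComponent reasoner = new OWLAPIReasoner(sources);
        reasoner.init();
    }
}

Judging from the diffs, the same pattern applies to the other sources added to Set<KnowledgeSource> collections in this revision (KBFile, SparqlKnowledgeSource), without any reference to the AbstractKnowledgeSource base class.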
From: <lor...@us...> - 2012-03-29 13:09:10
Revision: 3624 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3624&view=rev Author: lorenz_b Date: 2012-03-29 13:09:03 +0000 (Thu, 29 Mar 2012) Log Message: ----------- Removed dependencies to SESAME. Changed some examples by reducing execution time. Fixed Unit test to ignore SPARQL examples correctly. Modified Paths: -------------- trunk/components-core/pom.xml trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java trunk/examples/carcinogenesis/train.conf trunk/examples/family-benchmark/Aunt.conf trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java Modified: trunk/components-core/pom.xml =================================================================== --- trunk/components-core/pom.xml 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/components-core/pom.xml 2012-03-29 13:09:03 UTC (rev 3624) @@ -106,6 +106,10 @@ <artifactId>xercesImpl</artifactId> <groupId>xerces</groupId> </exclusion> + <exclusion> + <artifactId>any23-core</artifactId> + <groupId>org.deri.any23</groupId> + </exclusion> </exclusions> </dependency> Modified: trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/components-core/src/main/java/org/dllearner/algorithms/DisjointClassesLearner.java 2012-03-29 13:09:03 UTC (rev 3624) @@ -21,10 +21,8 @@ import java.net.URL; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -46,11 +44,8 @@ import org.dllearner.core.owl.NamedClass; import org.dllearner.kb.LocalModelBasedSparqlEndpointKS; import org.dllearner.kb.SparqlEndpointKS; -import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.AxiomScore; import org.dllearner.learningproblems.Heuristics; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,6 +56,8 @@ import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; +import com.hp.hpl.jena.vocabulary.OWL2; +import com.hp.hpl.jena.vocabulary.RDF; /** * Learns disjoint classes using SPARQL queries. 
@@ -340,7 +337,7 @@ //secondly, create disjoint classexpressions with score 1 - (#occurence/#all) for(Entry<NamedClass, Integer> entry : sortByValues(class2Count)){ //drop classes from OWL and RDF namespace - if(entry.getKey().getName().startsWith(OWL.NAMESPACE) || entry.getKey().getName().startsWith(RDF.NAMESPACE))continue; + if(entry.getKey().getName().startsWith(OWL2.getURI()) || entry.getKey().getName().startsWith(RDF.getURI()))continue; // evalDesc = new EvaluatedDescription(entry.getKey(), // new AxiomScore(1 - (entry.getValue() / (double)all))); double[] confidenceInterval = Heuristics.getConfidenceInterval95Wald(total, entry.getValue()); Modified: trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/components-core/src/main/java/org/dllearner/core/AbstractAxiomLearningAlgorithm.java 2012-03-29 13:09:03 UTC (rev 3624) @@ -44,9 +44,6 @@ import org.dllearner.learningproblems.Heuristics; import org.dllearner.reasoning.SPARQLReasoner; import org.dllearner.utilities.owl.AxiomComparator; -import org.openrdf.model.vocabulary.OWL; -import org.openrdf.model.vocabulary.RDF; -import org.openrdf.model.vocabulary.RDFS; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -58,6 +55,9 @@ import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; import com.hp.hpl.jena.util.iterator.Filter; +import com.hp.hpl.jena.vocabulary.OWL2; +import com.hp.hpl.jena.vocabulary.RDF; +import com.hp.hpl.jena.vocabulary.RDFS; /** * @author Lorenz Bühmann @@ -334,7 +334,7 @@ @Override public boolean accept(OntClass cls) { if(!cls.isAnon()){ - return cls.getURI().startsWith(OWL.NAMESPACE); + return cls.getURI().startsWith(OWL2.getURI()); } return false; } @@ -346,7 +346,7 @@ @Override public boolean accept(OntClass cls) { if(!cls.isAnon()){ - return cls.getURI().startsWith(RDFS.NAMESPACE); + return cls.getURI().startsWith(RDFS.getURI()); } return false; } @@ -358,7 +358,7 @@ @Override public boolean accept(OntClass cls) { if(!cls.isAnon()){ - return cls.getURI().startsWith(RDF.NAMESPACE); + return cls.getURI().startsWith(RDF.getURI()); } return false; } Modified: trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/components-core/src/main/java/org/dllearner/utilities/StructureBasedRootClassFinder.java 2012-03-29 13:09:03 UTC (rev 3624) @@ -8,7 +8,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.httpclient.methods.GetMethod; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.AddAxiom; import org.semanticweb.owlapi.model.AxiomType; @@ -47,10 +46,6 @@ import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; -import uk.ac.manchester.cs.owlapi.modularity.ModuleType; - -import com.clarkparsia.modularity.ModularityUtils; -import com.clarkparsia.owlapi.modularity.locality.LocalityClass; import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory; public class StructureBasedRootClassFinder implements 
RootClassFinder, OWLClassExpressionVisitor { Modified: trunk/examples/carcinogenesis/train.conf =================================================================== --- trunk/examples/carcinogenesis/train.conf 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/examples/carcinogenesis/train.conf 2012-03-29 13:09:03 UTC (rev 3624) @@ -355,7 +355,7 @@ "kb:d335" } -op.type = "syncrho" +op.type = "rho" op.cardinalityLimit = 5 // CELOE configuration @@ -366,7 +366,7 @@ alg.writeSearchTree = false alg.noisePercentage = 32 alg.startClass = "http://dl-learner.org/carcinogenesis#Compound" -alg.maxExecutionTimeInSeconds = 1800 +alg.maxExecutionTimeInSeconds = 100 //alg.maxClassDescriptionTests = 10000000 // PCELOE configuration Modified: trunk/examples/family-benchmark/Aunt.conf =================================================================== --- trunk/examples/family-benchmark/Aunt.conf 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/examples/family-benchmark/Aunt.conf 2012-03-29 13:09:03 UTC (rev 3624) @@ -17,7 +17,7 @@ h.type ="celoe_heuristic" h.expansionPenaltyFactor = 0.02 alg.type = "celoe" -alg.maxExecutionTimeInSeconds = 200 +alg.maxExecutionTimeInSeconds = 100 alg.terminateOnNoiseReached = true Modified: trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java =================================================================== --- trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java 2012-03-27 15:09:17 UTC (rev 3623) +++ trunk/interfaces/src/test/java/org/dllearner/test/junit/ExampleTests.java 2012-03-29 13:09:03 UTC (rev 3624) @@ -163,13 +163,13 @@ // start example CLI start = new CLI(new File(conf)); start.init(); - start.run(); // System.out.println("algorithm: " + start.getLearningAlgorithm()); boolean isSparql = start.getKnowledgeSource() instanceof SparqlKnowledgeSource; // boolean isSparql = false; LearningAlgorithm algorithm = start.getLearningAlgorithm(); if((testGP || !(algorithm instanceof GP)) && (sparql == 0 || (sparql == 1 && isSparql) || (sparql == 2 && !isSparql) ) ) { + start.run(); started = true; // start.start(false); // test is successful if a concept was learned This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
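The vocabulary switch in the revision above is likewise mechanical: the org.openrdf NAMESPACE constants are replaced by the namespace URIs bundled with Jena. The sketch below condenses the three filters rewritten in AbstractAxiomLearningAlgorithm into a single filter purely for illustration; it is not part of the commit.

import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.util.iterator.Filter;
import com.hp.hpl.jena.vocabulary.OWL2;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;

public class BuiltinNamespaceFilterSketch {

    // accepts only named classes from the OWL, RDF or RDFS namespaces;
    // OWL2.getURI(), RDF.getURI() and RDFS.getURI() replace the former
    // org.openrdf OWL.NAMESPACE, RDF.NAMESPACE and RDFS.NAMESPACE constants
    public static final Filter<OntClass> BUILTIN_NAMESPACE_FILTER = new Filter<OntClass>() {
        @Override
        public boolean accept(OntClass cls) {
            if (cls.isAnon()) {
                return false;
            }
            String uri = cls.getURI();
            return uri.startsWith(OWL2.getURI())
                    || uri.startsWith(RDF.getURI())
                    || uri.startsWith(RDFS.getURI());
        }
    };
}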
From: <sha...@us...> - 2012-04-08 20:53:13
Revision: 3628 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3628&view=rev Author: shadowtm Date: 2012-04-08 20:53:07 +0000 (Sun, 08 Apr 2012) Log Message: ----------- Removed the semanticservices repository as it was returning 500 Internal Error codes to developers who hadn't retrieved the pdb2rdf jars previously. These jars have been placed into the AKSW Archiva so that they can be retrieved - therefore, there is no longer a need to depend on the semanticservices repo. Modified Paths: -------------- trunk/pom.xml trunk/scripts/pom.xml Modified: trunk/pom.xml =================================================================== --- trunk/pom.xml 2012-04-04 15:12:31 UTC (rev 3627) +++ trunk/pom.xml 2012-04-08 20:53:07 UTC (rev 3628) @@ -390,6 +390,17 @@ <groupId>com.dumontierlab</groupId> <artifactId>pdb2rdf-parser</artifactId> <version>0.0.8</version> + <exclusions> + <exclusion> + <groupId>org.slf4j</groupId> + <artifactId>slf4j-log4j12</artifactId> + </exclusion> + <!--This pom is including a modified version of Jena that is out of sync with what we're using--> + <exclusion> + <groupId>com.hp.hpl.jena</groupId> + <artifactId>jena</artifactId> + </exclusion> + </exclusions> </dependency> <dependency> <groupId>com.dumontierlab</groupId> @@ -535,30 +546,24 @@ <enabled>true</enabled> </snapshots> </repository> - <!-- This repository contains the jars for pdb2rdf --> - <repository> - <id>semanticscience.org</id> - <name>Semanticscience projects</name> - <url>http://s1.semanticscience.org:8080/nexus/content/groups/public/</url> - </repository> - - <repository> - <id>apache-repo-releases</id> - <url>https://repository.apache.org/content/repositories/releases/</url> - <releases> - <enabled>true</enabled> - </releases> -</repository> - <repository> - <id>apache-repo-snapshots</id> - <url>https://repository.apache.org/content/repositories/snapshots/</url> - <releases> - <enabled>false</enabled> - </releases> - <snapshots> - <enabled>true</enabled> - </snapshots> - </repository> + + <repository> + <id>apache-repo-releases</id> + <url>https://repository.apache.org/content/repositories/releases/</url> + <releases> + <enabled>true</enabled> + </releases> + </repository> + <repository> + <id>apache-repo-snapshots</id> + <url>https://repository.apache.org/content/repositories/snapshots/</url> + <releases> + <enabled>false</enabled> + </releases> + <snapshots> + <enabled>true</enabled> + </snapshots> + </repository> </repositories> <pluginRepositories> Modified: trunk/scripts/pom.xml =================================================================== --- trunk/scripts/pom.xml 2012-04-04 15:12:31 UTC (rev 3627) +++ trunk/scripts/pom.xml 2012-04-08 20:53:07 UTC (rev 3628) @@ -37,18 +37,12 @@ <groupId>postgresql</groupId> <artifactId>postgresql</artifactId> </dependency> + <dependency> + <groupId>com.dumontierlab</groupId> + <artifactId>pdb2rdf-parser</artifactId> + </dependency> <dependency> <groupId>com.dumontierlab</groupId> - <artifactId>pdb2rdf-parser</artifactId> - <exclusions> - <exclusion> - <artifactId>slf4j-log4j12</artifactId> - <groupId>org.slf4j</groupId> - </exclusion> - </exclusions> - </dependency> - <dependency> - <groupId>com.dumontierlab</groupId> <artifactId>pdb2rdf-cli</artifactId> </dependency> <dependency> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2012-04-11 13:48:38
Revision: 3633 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3633&view=rev Author: kurzum Date: 2012-04-11 13:48:27 +0000 (Wed, 11 Apr 2012) Log Message: ----------- added more speed logging Modified Paths: -------------- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/QueryExecutor.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/TBoxQueryGenerator.java trunk/components-core/src/main/java/org/nlp2rdf/ontology/ClassIndexer.java trunk/examples/sparql/difference/DifferenceDBpediaYAGO_angela_vs_hillary.conf trunk/test/newcomponent/AristotlePosNeg.conf Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/ABoxQueryGenerator.java 2012-04-11 13:48:27 UTC (rev 3633) @@ -1,5 +1,5 @@ /** - * + * */ package org.dllearner.kb.sparql.simple; @@ -10,13 +10,15 @@ import com.hp.hpl.jena.ontology.Individual; import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.util.iterator.ExtendedIterator; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; /** * @author didierc - * */ public class ABoxQueryGenerator { public String createQuery(List<String> individuals, OntModel model, List<String> filters) { + Monitor monABoxQueryGeneration = MonitorFactory.getTimeMonitor("ABox query generator").start(); StringBuilder builder = new StringBuilder(); builder.append("CONSTRUCT {?s ?p ?o} "); builder.append("{ ?s ?p ?o ."); @@ -41,14 +43,23 @@ } } builder.append("}"); + monABoxQueryGeneration.stop(); return builder.toString(); } - + public String createLastQuery(List<String> individuals, OntModel model, List<String> filters) { + Monitor monABoxQueryGeneration = MonitorFactory.getTimeMonitor("ABox query generator") + .start(); StringBuilder builder = new StringBuilder(); - builder.append("CONSTRUCT {?s ?p ?o . ?o a ?class} "); - builder.append("{ ?s ?p ?o ."); - builder.append("?o a ?class"); + if (false) { + builder.append("CONSTRUCT {?s ?p ?o . 
?o a ?class} "); + builder.append("{ ?s ?p ?o ."); + builder.append("?o a ?class"); + } else { + builder.append("CONSTRUCT {?s ?p ?o } "); + builder.append("{ ?s ?p ?o "); + } + List<String> curIndividuals; if (model.isEmpty()) { curIndividuals = individuals; @@ -71,10 +82,13 @@ } } builder.append("}"); + monABoxQueryGeneration.stop(); return builder.toString(); } - - private List<String> getIndividualsFromModel(OntModel model) { + + private List<String> getIndividualsFromModel + (OntModel + model) { ExtendedIterator<Individual> iterator = model.listIndividuals(); LinkedList<String> result = new LinkedList<String>(); while (iterator.hasNext()) { @@ -82,8 +96,9 @@ } return result; } - - public List<String> difference(List<String> a, List<String> b) { + + public List<String> difference + (List<String> a, List<String> b) { ArrayList<String> result = new ArrayList<String>(b); result.removeAll(a); return result; Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/QueryExecutor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/QueryExecutor.java 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/QueryExecutor.java 2012-04-11 13:48:27 UTC (rev 3633) @@ -1,8 +1,10 @@ /** - * + * */ package org.dllearner.kb.sparql.simple; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -14,26 +16,29 @@ /** * @author didierc - * */ public class QueryExecutor { - - private static Logger log=LoggerFactory.getLogger(QueryExecutor.class); - + + private static Logger log = LoggerFactory.getLogger(QueryExecutor.class); + public OntModel executeQuery(String queryString, String endpoint, OntModel model) { + Monitor monQueryingTotal = MonitorFactory.start("Query time total").start(); Query query = QueryFactory.create(queryString); log.debug("Jena Query: ", query); QueryExecution qExec = QueryExecutionFactory.sparqlService(endpoint, query); qExec.execConstruct(model); + monQueryingTotal.stop(); return model; } - + public OntModel executeQuery(String queryString, String endpoint, OntModel model, String defaultGraphURI) { + Monitor monQueryingTotal = MonitorFactory.start("Query time total").start(); Query query = QueryFactory.create(queryString); log.debug("Jena Query: ", query); QueryExecution qExec = QueryExecutionFactory.sparqlService(endpoint, query, defaultGraphURI); - log.debug("Qexec: {}",qExec); + log.debug("Qexec: {}", qExec); qExec.execConstruct(model); + monQueryingTotal.stop(); return model; } } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SchemaIndexer.java 2012-04-11 13:48:27 UTC (rev 3633) @@ -4,6 +4,8 @@ import java.io.FileInputStream; import java.io.FileNotFoundException; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; import org.nlp2rdf.ontology.ClassIndexer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -13,42 +15,45 @@ public class SchemaIndexer { private static Logger log = LoggerFactory.getLogger(SchemaIndexer.class); - private ClassIndexer classIndexer=null; - private OntModel model; - private File ontologySchema; - 
- public SchemaIndexer(){} - - public void init(){ - classIndexer=new ClassIndexer(); + private ClassIndexer classIndexer = null; + private OntModel model; + private File ontologySchema; + + public SchemaIndexer() { + } + + public void init() { + classIndexer = new ClassIndexer(); model = ModelFactory.createOntologyModel(); try { - model.read(new FileInputStream(ontologySchema), null); - } catch (FileNotFoundException e) { - log.error(e.getMessage(),e); - } + Monitor m0 = MonitorFactory.start("Indexer parsing ontology"); + model.read(new FileInputStream(ontologySchema), null); + m0.stop(); + } catch (FileNotFoundException e) { + log.error(e.getMessage(), e); + } classIndexer.index(model); } - - public OntModel getHierarchyForURI(String classUri){ - if(classIndexer==null){ + + public OntModel getHierarchyForURI(String classUri) { + if (classIndexer == null) { this.init(); } return classIndexer.getHierarchyForClassURI(classUri); } - - public static void main(String...args){ - SchemaIndexer i= new SchemaIndexer(); - System.out.println(i.getHierarchyForURI("http://dbpedia.org/ontology/Software")); + + public static void main(String... args) { + SchemaIndexer i = new SchemaIndexer(); + System.out.println(i.getHierarchyForURI("http://dbpedia.org/ontology/Software")); } - public File getOntologySchema() { - return ontologySchema; - } + public File getOntologySchema() { + return ontologySchema; + } - public void setOntologySchema(File ontologySchema) { - this.ontologySchema = ontologySchema; - } - - + public void setOntologySchema(File ontologySchema) { + this.ontologySchema = ontologySchema; + } + + } \ No newline at end of file Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/SparqlSimpleExtractor.java 2012-04-11 13:48:27 UTC (rev 3633) @@ -24,224 +24,224 @@ @ComponentAnn(name = "efficient SPARQL fragment extractor", shortName = "sparqls", version = 0.1) public class SparqlSimpleExtractor implements KnowledgeSource { - @ConfigOption(name = "endpointURL", description = "URL of the SPARQL endpoint", required = true) - private String endpointURL = null; - private OntModel model = null; - @ConfigOption(name = "instances", description = "List of the instances to use", required = true) - private List<String> instances = null; - @ConfigOption(name = "filters", description = "List of the filters to use", required = true) - private List<String> filters = null; - @ConfigOption(name = "recursionDepth", description = "recursion depth", required = true) - private int recursionDepth = 0; + @ConfigOption(name = "endpointURL", description = "URL of the SPARQL endpoint", required = true) + private String endpointURL = null; + private OntModel model = null; + @ConfigOption(name = "instances", description = "List of the instances to use", required = true) + private List<String> instances = null; + @ConfigOption(name = "filters", description = "List of the filters to use", required = true) + private List<String> filters = null; + @ConfigOption(name = "recursionDepth", description = "recursion depth", required = true) + private int recursionDepth = 0; - @ConfigOption(name = "defaultGraphURI", description = "default graph URI", required = true) - private String defaultGraphURI = null; - 
@ConfigOption(name = "sparqlQuery", description = "Sparql Query", required = false) - private String sparqlQuery = null; - @ConfigOption(name = "ontologyFile", description = "Ontology Schema File", required = true) - private File ontologyFile = null; - private OWLOntology owlOntology; - private SchemaIndexer indexer; + @ConfigOption(name = "defaultGraphURI", description = "default graph URI", required = true) + private String defaultGraphURI = null; + @ConfigOption(name = "sparqlQuery", description = "Sparql Query", required = false) + private String sparqlQuery = null; + @ConfigOption(name = "ontologyFile", description = "Ontology Schema File", required = true) + private File ontologyFile = null; + private OWLOntology owlOntology; + private SchemaIndexer indexer; - private static Logger log = LoggerFactory - .getLogger(SparqlSimpleExtractor.class); + private static Logger log = LoggerFactory.getLogger(SparqlSimpleExtractor.class); - public SparqlSimpleExtractor() { - model = ModelFactory.createOntologyModel(); - } + public SparqlSimpleExtractor() { + model = ModelFactory.createOntologyModel(); + } - /** - * @param args - * @throws ComponentInitException - */ - public static void main(String[] args) throws ComponentInitException { - SparqlSimpleExtractor extractor = new SparqlSimpleExtractor(); - extractor.setEndpointURL("http://live.dbpedia.org/sparql"); - extractor.setRecursionDepth(1); - extractor.setDefaultGraphURI("http://dbpedia.org"); - List<String> instances = new ArrayList<String>(7); - instances.add("http://dbpedia.org/resource/Democritus"); - instances.add("http://dbpedia.org/resource/Zeno_of_Elea"); - instances.add("http://dbpedia.org/resource/Plato"); - instances.add("http://dbpedia.org/resource/Socrates"); - instances.add("http://dbpedia.org/resource/Archytas"); - instances.add("http://dbpedia.org/resource/Pythagoras"); - instances.add("http://dbpedia.org/resource/Philolaus"); + /** + * @param args + * @throws ComponentInitException + */ + public static void main(String[] args) throws ComponentInitException { + SparqlSimpleExtractor extractor = new SparqlSimpleExtractor(); + extractor.setEndpointURL("http://live.dbpedia.org/sparql"); + extractor.setRecursionDepth(1); + extractor.setDefaultGraphURI("http://dbpedia.org"); + List<String> instances = new ArrayList<String>(7); + instances.add("http://dbpedia.org/resource/Democritus"); + instances.add("http://dbpedia.org/resource/Zeno_of_Elea"); + instances.add("http://dbpedia.org/resource/Plato"); + instances.add("http://dbpedia.org/resource/Socrates"); + instances.add("http://dbpedia.org/resource/Archytas"); + instances.add("http://dbpedia.org/resource/Pythagoras"); + instances.add("http://dbpedia.org/resource/Philolaus"); - extractor.setInstances(instances); - extractor.init(); - List<String> individuals = new LinkedList<String>(); - individuals.add("People"); - individuals.add("Animals"); - extractor.setInstances(individuals); - // System.out.println(extractor.createQuery()); - } + extractor.setInstances(instances); + extractor.init(); + List<String> individuals = new LinkedList<String>(); + individuals.add("People"); + individuals.add("Animals"); + extractor.setInstances(individuals); + // System.out.println(extractor.createQuery()); + } - @Override - public void init() throws ComponentInitException { - if (endpointURL == null) { - throw new ComponentInitException( - "Parameter endpoint URL is required"); - } - if (instances == null) { - throw new ComponentInitException("Parameter instances is required"); - } - if 
(recursionDepth == 0) { - throw new ComponentInitException( - "A value bigger than 0 is required for parameter recursionDepth"); - } - if (ontologyFile == null) { - throw new ComponentInitException( - "An ontology schema description file (ontologyFile) in RDF ist required"); - } - Monitor monComp = MonitorFactory.start("Simple SPARQL Component") - .start(); - Monitor monIndexer = MonitorFactory.start("Schema Indexer").start(); - indexer = new SchemaIndexer(); - indexer.setOntologySchema(ontologyFile); - indexer.init(); - monIndexer.stop(); + @Override + public void init() throws ComponentInitException { + if (endpointURL == null) { + throw new ComponentInitException( + "Parameter endpoint URL is required"); + } + if (instances == null) { + throw new ComponentInitException("Parameter instances is required"); + } + if (recursionDepth == 0) { + throw new ComponentInitException( + "A value bigger than 0 is required for parameter recursionDepth"); + } + if (ontologyFile == null) { + throw new ComponentInitException( + "An ontology schema description file (ontologyFile) in RDF ist required"); + } + Monitor monComp = MonitorFactory.start("Simple SPARQL Component") + .start(); + Monitor monIndexer = MonitorFactory.start("Schema Indexer").start(); + indexer = new SchemaIndexer(); + indexer.setOntologySchema(ontologyFile); + indexer.init(); + monIndexer.stop(); - Monitor monAquery; - Monitor monQuerying; - QueryExecutor executor = new QueryExecutor(); - String queryString; - if (sparqlQuery == null) { - ABoxQueryGenerator aGenerator = new ABoxQueryGenerator(); - for (int i = 0; i < recursionDepth - 1; i++) { - monAquery = MonitorFactory.getTimeMonitor("A query generator") - .start(); - queryString = aGenerator.createQuery(instances, model, filters); - monAquery.stop(); - log.debug("SPARQL: {}", queryString); - monQuerying = MonitorFactory.start("Querying"); - executor.executeQuery(queryString, endpointURL, model, - defaultGraphURI); - monQuerying.stop(); - } - monAquery = MonitorFactory.getTimeMonitor("A query generator") - .start(); - queryString = aGenerator.createLastQuery(instances, model, filters); - monAquery.stop(); - log.debug("SPARQL: {}", queryString); - monQuerying = MonitorFactory.start("Querying"); - executor.executeQuery(queryString, endpointURL, model, defaultGraphURI); - monQuerying.stop(); - } else { - monQuerying=MonitorFactory.getTimeMonitor("Querying").start(); - executor.executeQuery(sparqlQuery, endpointURL, model); - monQuerying.stop(); - } - + Monitor monQueryingABox; + QueryExecutor executor = new QueryExecutor(); + String queryString; + if (sparqlQuery == null) { + ABoxQueryGenerator aGenerator = new ABoxQueryGenerator(); + for (int i = 0; i < recursionDepth - 1; i++) { - TBoxQueryGenerator tGenerator = new TBoxQueryGenerator(); - Monitor monTquery = MonitorFactory.getTimeMonitor("T query generator") - .start(); - queryString = tGenerator.createQuery(model, filters, instances); - monTquery.stop(); - monQuerying = MonitorFactory.start("Querying"); - executor.executeQuery(queryString, endpointURL, model, defaultGraphURI); - monQuerying.stop(); - Set<OntClass> classes = model.listClasses().toSet(); - for (OntClass ontClass : classes) { - OntModel hierarchy = indexer.getHierarchyForURI(ontClass.getURI()); - if (hierarchy != null) { - model.add(hierarchy); - log.debug("{}", model); - } - } - JenaToOwlapiConverter converter = new JenaToOwlapiConverter(); - owlOntology = converter.convert(this.model); - monComp.stop(); - log.info("*******Simple SPARQL Extractor********"); - for 
(Monitor monitor : MonitorFactory.getRootMonitor().getMonitors()) { - log.info("* {} *", monitor); - } - log.info("**************************************"); - } + queryString = aGenerator.createQuery(instances, model, filters); + log.debug("SPARQL: {}", queryString); + monQueryingABox = MonitorFactory.start("ABox query time"); + executor.executeQuery(queryString, endpointURL, model, + defaultGraphURI); + monQueryingABox.stop(); + } - public String getEndpointURL() { - return endpointURL; - } - public void setEndpointURL(String endpointURL) { - this.endpointURL = endpointURL; - } + queryString = aGenerator.createLastQuery(instances, model, filters); - public String getDefaultGraphURI() { - return defaultGraphURI; - } + log.debug("SPARQL: {}", queryString); - public void setDefaultGraphURI(String defaultGraphURI) { - this.defaultGraphURI = defaultGraphURI; - } + monQueryingABox = MonitorFactory.start("ABox query time"); + Monitor monQueryingABox2 = MonitorFactory.start("ABox query time last query"); + executor.executeQuery(queryString, endpointURL, model, defaultGraphURI); + monQueryingABox.stop(); + monQueryingABox2.stop(); - public Model getModel() { - return model; - } + } else { + monQueryingABox = MonitorFactory.getTimeMonitor("ABox query time").start(); + executor.executeQuery(sparqlQuery, endpointURL, model); + monQueryingABox.stop(); + } - public void setModel(OntModel model) { - this.model = model; - } - /** - * @return the filters - */ - public List<String> getFilters() { - return filters; - } + TBoxQueryGenerator tGenerator = new TBoxQueryGenerator(); - /** - * @param filters - * the filters to set - */ - public void setFilters(List<String> filters) { - this.filters = filters; - } + //TODO check if all instances are queried. model.listIndividuals().toSet() + queryString = tGenerator.createQuery(model, filters, instances); - /** - * @return the instances - */ - public List<String> getInstances() { - return instances; - } + Monitor monQueryingTBox = MonitorFactory.start("TBox query time"); + executor.executeQuery(queryString, endpointURL, model, defaultGraphURI); + monQueryingTBox.stop(); - /** - * @param instances - * the instances to set - */ - public void setInstances(List<String> instances) { - this.instances = instances; - } + Monitor monIndexing = MonitorFactory.start("Querying index and conversion"); + Set<OntClass> classes = model.listClasses().toSet(); + for (OntClass ontClass : classes) { + OntModel hierarchy = indexer.getHierarchyForURI(ontClass.getURI()); + if (hierarchy != null) { + model.add(hierarchy); + log.debug("{}", model); + } + } + JenaToOwlapiConverter converter = new JenaToOwlapiConverter(); + owlOntology = converter.convert(this.model); + monIndexing.stop(); + monComp.stop(); + log.info("*******Simple SPARQL Extractor********"); + for (Monitor monitor : MonitorFactory.getRootMonitor().getMonitors()) { + log.info("* {} *", monitor); + } + log.info("**************************************"); + } - /** - * @return the recursionDepth - */ - public int getRecursionDepth() { - return recursionDepth; - } + public String getEndpointURL() { + return endpointURL; + } - /** - * @param recursionDepth - * the recursionDepth to set - */ - public void setRecursionDepth(int recursionDepth) { - this.recursionDepth = recursionDepth; - } + public void setEndpointURL(String endpointURL) { + this.endpointURL = endpointURL; + } - /** - * @return - */ - public OWLOntology getOWLOntology() { - return owlOntology; - } + public String getDefaultGraphURI() { + return defaultGraphURI; + 
} - public File getOntologyFile() { - return ontologyFile; - } + public void setDefaultGraphURI(String defaultGraphURI) { + this.defaultGraphURI = defaultGraphURI; + } - public void setOntologyFile(File ontologyFile) { - this.ontologyFile = ontologyFile; - } + public Model getModel() { + return model; + } + public void setModel(OntModel model) { + this.model = model; + } + + /** + * @return the filters + */ + public List<String> getFilters() { + return filters; + } + + /** + * @param filters the filters to set + */ + public void setFilters(List<String> filters) { + this.filters = filters; + } + + /** + * @return the instances + */ + public List<String> getInstances() { + return instances; + } + + /** + * @param instances the instances to set + */ + public void setInstances(List<String> instances) { + this.instances = instances; + } + + /** + * @return the recursionDepth + */ + public int getRecursionDepth() { + return recursionDepth; + } + + /** + * @param recursionDepth the recursionDepth to set + */ + public void setRecursionDepth(int recursionDepth) { + this.recursionDepth = recursionDepth; + } + + /** + * @return + */ + public OWLOntology getOWLOntology() { + return owlOntology; + } + + public File getOntologyFile() { + return ontologyFile; + } + + public void setOntologyFile(File ontologyFile) { + this.ontologyFile = ontologyFile; + } + } Modified: trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/TBoxQueryGenerator.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/TBoxQueryGenerator.java 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/components-core/src/main/java/org/dllearner/kb/sparql/simple/TBoxQueryGenerator.java 2012-04-11 13:48:27 UTC (rev 3633) @@ -8,6 +8,8 @@ import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; /** * @author didierc @@ -16,6 +18,8 @@ public class TBoxQueryGenerator { public String createQuery(OntModel model, List<String> filters, List<String> individuals) { + Monitor monTquery = MonitorFactory.getTimeMonitor("TBox query generator") + .start(); StringBuilder builder = new StringBuilder( "CONSTRUCT { ?example a ?class . } "); builder.append("{ ?example a ?class . 
"); @@ -34,6 +38,7 @@ } } builder.append("}"); + monTquery.stop(); return builder.toString(); } Modified: trunk/components-core/src/main/java/org/nlp2rdf/ontology/ClassIndexer.java =================================================================== --- trunk/components-core/src/main/java/org/nlp2rdf/ontology/ClassIndexer.java 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/components-core/src/main/java/org/nlp2rdf/ontology/ClassIndexer.java 2012-04-11 13:48:27 UTC (rev 3633) @@ -25,9 +25,13 @@ import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.ontology.OntModelSpec; import com.hp.hpl.jena.rdf.model.ModelFactory; +import com.hp.hpl.jena.util.iterator.ExtendedIterator; +import com.jamonapi.Monitor; +import com.jamonapi.MonitorFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.naming.ldap.ExtendedRequest; import java.util.*; @@ -55,11 +59,24 @@ } public void index(OntModel from) { - Set<OntClass> classes = from.listClasses().toSet(); + + // Set<OntClass> classes = from.listClasses(); int i = 0; - for (OntClass cl : classes) { + OntClass cl; + for (ExtendedIterator<OntClass> it = from.listClasses(); it.hasNext(); ) { + Monitor m0 = MonitorFactory.start("Indexer listClasses"); + cl = it.next(); + m0.stop(); + //for (OntClass cl : classes) { + Monitor m1 = MonitorFactory.start("Indexer generating tree"); Tree t = new Tree(cl); - classUriToClassHierarchy.put(cl.getURI(), t.toModel()); + m1.stop(); + Monitor m2 = MonitorFactory.start("Indexer generating model"); + OntModel m = t.toModel(); + m2.stop(); + Monitor m3 = MonitorFactory.start("Indexer generating hashmap"); + classUriToClassHierarchy.put(cl.getURI(), m); + m3.stop(); } } Modified: trunk/examples/sparql/difference/DifferenceDBpediaYAGO_angela_vs_hillary.conf =================================================================== --- trunk/examples/sparql/difference/DifferenceDBpediaYAGO_angela_vs_hillary.conf 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/examples/sparql/difference/DifferenceDBpediaYAGO_angela_vs_hillary.conf 2012-04-11 13:48:27 UTC (rev 3633) @@ -20,7 +20,7 @@ // SPARQL options sparql.type = "SPARQL endpoint fragment" -sparql.url = "http://dbpedia.openlinksw.com:8890/sparql" +sparql.url = "http://live.dbpedia.org/sparql" sparql.defaultGraphURIs = {"http://dbpedia.org"} sparql.recursionDepth = 1 //TODOREFACTOR check if predefinedFilter works at all Modified: trunk/test/newcomponent/AristotlePosNeg.conf =================================================================== --- trunk/test/newcomponent/AristotlePosNeg.conf 2012-04-11 12:51:42 UTC (rev 3632) +++ trunk/test/newcomponent/AristotlePosNeg.conf 2012-04-11 13:48:27 UTC (rev 3633) @@ -14,6 +14,7 @@ sparql.endpointURL = "http://live.dbpedia.org/sparql" sparql.defaultGraphURI = "http://dbpedia.org" sparql.recursionDepth = 1 +sparql.ontologyFile= "http://downloads.dbpedia.org/3.6/dbpedia_3.6.owl" sparql.instances = { "http://dbpedia.org/resource/Democritus", This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <sha...@us...> - 2012-04-18 02:26:52
|
Revision: 3638 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=3638&view=rev Author: shadowtm Date: 2012-04-18 02:26:42 +0000 (Wed, 18 Apr 2012) Log Message: ----------- Moved configuration interfaces into the interfaces module as that's a more appropriate location for them since they're used solely by our interfaces to bootstrap a DL-Learner configuration. Added Paths: ----------- trunk/interfaces/src/main/java/org/dllearner/configuration/IConfiguration.java trunk/interfaces/src/main/java/org/dllearner/configuration/IConfigurationProperty.java Removed Paths: ------------- trunk/components-core/src/main/java/org/dllearner/configuration/IConfiguration.java trunk/components-core/src/main/java/org/dllearner/configuration/IConfigurationProperty.java Deleted: trunk/components-core/src/main/java/org/dllearner/configuration/IConfiguration.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/configuration/IConfiguration.java 2012-04-13 14:02:37 UTC (rev 3637) +++ trunk/components-core/src/main/java/org/dllearner/configuration/IConfiguration.java 2012-04-18 02:26:42 UTC (rev 3638) @@ -1,64 +0,0 @@ -/** - * Copyright (C) 2007-2011, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - */ - -package org.dllearner.configuration; - -import java.util.Collection; - -/** - * Created by IntelliJ IDEA. - * User: Chris - * Date: 8/18/11 - * Time: 9:45 PM - * - * This interface defines our interaction with a DL-Learner specific configuration. - * - */ -public interface IConfiguration { - - /** - * Get a collection of all the bean names defined in the configuration. - * - * @return a collection of all the bean names defined in the configuration. - */ - public Collection<String> getBeanNames(); - - /** - * Get the class for the given bean. - * - * @param beanName The name of the bean to get the class for. - * @return The class for the given bean. - */ - public Class getClass(String beanName); - - /** - * Get the Base Directory where this configuration should be running out of. - * - * @return The Base Directory where this configuration should be running out of. - */ - public String getBaseDir(); - - /** - * Get the configuration properties for the specified bean. - * - * @param beanName The bean to get properties for. 
- * @return A collection of properties - */ - public Collection<IConfigurationProperty> getConfigurationProperties(String beanName); -} Deleted: trunk/components-core/src/main/java/org/dllearner/configuration/IConfigurationProperty.java =================================================================== --- trunk/components-core/src/main/java/org/dllearner/configuration/IConfigurationProperty.java 2012-04-13 14:02:37 UTC (rev 3637) +++ trunk/components-core/src/main/java/org/dllearner/configuration/IConfigurationProperty.java 2012-04-18 02:26:42 UTC (rev 3638) @@ -1,42 +0,0 @@ -package org.dllearner.configuration; - -/** - * Created by IntelliJ IDEA. - * User: Chris - * Date: 8/27/11 - * Time: 8:57 AM - * - * Respresents a Configuraiton Option setting. - */ -public interface IConfigurationProperty { - - /** - * Get the Name of this Property - * - * @return The Name of this property. - */ - public String getName(); - - - /** - * Get the String representation of the value of this property. - * - * @return The String representation of the value of this property. - */ - public Object getValue(); - - - /** - * Does this property represent a bean reference? - * - * @return True if it does. - */ - public boolean isBeanReference(); - - /** - * Does this property represent a collection of bean references? - * - * @return True if it does. - */ - public boolean isBeanReferenceCollection(); -} Copied: trunk/interfaces/src/main/java/org/dllearner/configuration/IConfiguration.java (from rev 3637, trunk/components-core/src/main/java/org/dllearner/configuration/IConfiguration.java) =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/configuration/IConfiguration.java (rev 0) +++ trunk/interfaces/src/main/java/org/dllearner/configuration/IConfiguration.java 2012-04-18 02:26:42 UTC (rev 3638) @@ -0,0 +1,70 @@ +/** + * Copyright (C) 2007-2011, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +package org.dllearner.configuration; + +import java.util.Collection; + +/** + * Created by IntelliJ IDEA. + * User: Chris + * Date: 8/18/11 + * Time: 9:45 PM + * <p/> + * This interface represents on complete instance of a DL-Learner Configuration. + * <p/> + * Once an implementation of this interface is fully instantiated, it can be passed to an ApplicationContextBuilder in order + * to instantiate a Spring Application Context. + * <p/> + * Once the application context has been created, learning algorithms can be extracted and then executed. + * + * @see org.dllearner.configuration.spring.ApplicationContextBuilder + */ +public interface IConfiguration { + + /** + * Get a collection of all the bean names defined in the configuration. + * + * @return a collection of all the bean names defined in the configuration. 
+ */ + public Collection<String> getBeanNames(); + + /** + * Get the class for the given bean. + * + * @param beanName The name of the bean to get the class for. + * @return The class for the given bean. + */ + public Class getClass(String beanName); + + /** + * Get the Base Directory where this configuration should be running out of. + * + * @return The Base Directory where this configuration should be running out of. + */ + public String getBaseDir(); + + /** + * Get the configuration properties for the specified bean. + * + * @param beanName The bean to get properties for. + * @return A collection of properties + */ + public Collection<IConfigurationProperty> getConfigurationProperties(String beanName); +} Copied: trunk/interfaces/src/main/java/org/dllearner/configuration/IConfigurationProperty.java (from rev 3637, trunk/components-core/src/main/java/org/dllearner/configuration/IConfigurationProperty.java) =================================================================== --- trunk/interfaces/src/main/java/org/dllearner/configuration/IConfigurationProperty.java (rev 0) +++ trunk/interfaces/src/main/java/org/dllearner/configuration/IConfigurationProperty.java 2012-04-18 02:26:42 UTC (rev 3638) @@ -0,0 +1,44 @@ +package org.dllearner.configuration; + +/** + * Created by IntelliJ IDEA. + * User: Chris + * Date: 8/27/11 + * Time: 8:57 AM + * + * This Represents a Configuration Option within a DL Learner Configuration. + * + * @see IConfiguration + */ +public interface IConfigurationProperty { + + /** + * Get the Name of this Property + * + * @return The Name of this property. + */ + public String getName(); + + + /** + * Get the String representation of the value of this property. + * + * @return The String representation of the value of this property. + */ + public Object getValue(); + + + /** + * Does this property represent a bean reference? + * + * @return True if it does. + */ + public boolean isBeanReference(); + + /** + * Does this property represent a collection of bean references? + * + * @return True if it does. + */ + public boolean isBeanReferenceCollection(); +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
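The two interfaces moved in this revision are small enough that a front end can implement them directly. The sketch below is a hypothetical, hard-coded IConfiguration describing a single bean named "sparql" with one scalar property; the placeholder class literal and values are illustrative only, and the real implementations in the interfaces module would typically build this information from a parsed conf file before handing it to an ApplicationContextBuilder.

import java.util.Collection;
import java.util.Collections;

import org.dllearner.configuration.IConfiguration;
import org.dllearner.configuration.IConfigurationProperty;

// Toy configuration: one bean ("sparql") with a single scalar property.
public class HardCodedConfiguration implements IConfiguration {

    @Override
    public Collection<String> getBeanNames() {
        return Collections.singletonList("sparql");
    }

    @Override
    public Class getClass(String beanName) {
        // A real implementation resolves the component class from the conf
        // file's "type" directive; Object.class is only a stand-in here.
        return Object.class;
    }

    @Override
    public String getBaseDir() {
        return System.getProperty("user.dir");
    }

    @Override
    public Collection<IConfigurationProperty> getConfigurationProperties(String beanName) {
        IConfigurationProperty endpointUrl = new IConfigurationProperty() {
            @Override public String getName() { return "endpointURL"; }
            @Override public Object getValue() { return "http://live.dbpedia.org/sparql"; }
            @Override public boolean isBeanReference() { return false; }
            @Override public boolean isBeanReferenceCollection() { return false; }
        };
        return Collections.singletonList(endpointUrl);
    }
}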