From: <jen...@us...> - 2008-01-14 08:56:23
|
Revision: 369 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=369&view=rev Author: jenslehmann Date: 2008-01-14 00:56:17 -0800 (Mon, 14 Jan 2008) Log Message: ----------- started to merge the two SPARQL components Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/cli/Start.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQueryMaker.java trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlCache.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlFilter.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlOntologyCollector.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/SparqlCache.java trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java trunk/src/dl-learner/org/dllearner/kb/SparqlFilter.java trunk/src/dl-learner/org/dllearner/kb/SparqlOntologyCollector.java trunk/src/dl-learner/org/dllearner/kb/SparqlQueryMaker.java Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/lib/components.ini 2008-01-14 08:56:17 UTC (rev 369) @@ -3,8 +3,7 @@ # knowledge sources org.dllearner.kb.OWLFile org.dllearner.kb.KBFile -org.dllearner.kb.SparqlEndpoint -org.dllearner.kb.SparqlEndpointRestructured +org.dllearner.kb.sparql.SparqlEndpoint # reasoners org.dllearner.reasoning.OWLAPIReasoner org.dllearner.reasoning.DIGReasoner Modified: trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-14 08:56:17 UTC (rev 369) @@ -61,7 +61,6 @@ import 
org.dllearner.kb.KBFile; import org.dllearner.kb.OWLFile; import org.dllearner.kb.SparqlEndpoint; -import org.dllearner.kb.SparqlEndpointRestructured; import org.dllearner.learningproblems.PosNegDefinitionLP; import org.dllearner.learningproblems.PosNegInclusionLP; import org.dllearner.learningproblems.PosOnlyDefinitionLP; @@ -231,7 +230,6 @@ Map<Class<? extends Component>, String> componentPrefixMapping = new HashMap<Class<? extends Component>, String>(); // knowledge sources componentPrefixMapping.put(SparqlEndpoint.class, "sparql"); - componentPrefixMapping.put(SparqlEndpointRestructured.class, "sparql2"); // reasoners componentPrefixMapping.put(DIGReasoner.class, "digReasoner"); componentPrefixMapping.put(OWLAPIReasoner.class, "owlAPIReasoner"); @@ -386,8 +384,6 @@ ksClass = KBFile.class; else if (formatString.equals("SPARQL")) ksClass = SparqlEndpoint.class; - else if (formatString.equals("SPARQL2")) - ksClass = SparqlEndpointRestructured.class; else if (formatString.equals("NT")) ksClass = OWLFile.class; else { Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlCache.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlCache.java 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlCache.java 2008-01-14 08:56:17 UTC (rev 369) @@ -1,202 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.net.URLEncoder; -/** - * - * This is a primitive cache. - * The objects of this class can be either the cache itself or just on entry in the cache - * - * the cache remembers: a timestamp, the original sparql-query, the result - * key is the subject http://dbpedia.org/resource/Angela_Merkel which is first urlencoded - * and so serves as the hash for the filename. - * Cache validates if timestamp too old and Sparql-Query the same - * before returning the SPARQL xml-result - * - * @author Sebastian Hellmann - * @author Sebastian Knappe - */ -public class SparqlCache implements Serializable{ - - final static long serialVersionUID=104; - transient String basedir=""; - transient String fileending=".cache"; - long timestamp; - String content=""; - long daysoffreshness=15; - long multiplier=24*60*60*1000;//h m s ms - String sparqlquery=""; - - - /** - * Constructor for the cache itself. 
- * Called once at the beginning - * - * @param path Where the base path to the cache is - */ - public SparqlCache(String path){ - this.basedir=path+File.separator; - if(!new File(path).exists()) - {System.out.println(new File(path).mkdir());;} - - } - -// - /** - * Constructor for single cache object(one entry) - * - * @param content the sparql xml result - * @param sparql the sparql query - */ - public SparqlCache(String content, String sparql){ - this.content=content; - this.sparqlquery=sparql; - this.timestamp=System.currentTimeMillis(); - } - - - - /** - * use only on the cache object describing the cache itself - * - * @param key the individual - * @param sparql the sparql query - * @return the cached sparql result or null - */ - public String get(String key, String sparql){ - String ret=null; - try{ - SparqlCache c =readFromFile(makeFilename(key)); - if(c==null)return null; - if(!c.checkFreshness())return null; - if(!c.validate(sparql))return null; - - ret=c.content; - }catch (Exception e) {e.printStackTrace();} - return ret; - } - - /** - * - * constructor for single cache object(one entry) - * - * @param key the individual - * @param content the sparql result - * @param sparql the sparql query - */ - public void put(String key, String content, String sparql){ - SparqlCache c=new SparqlCache(content,sparql); - putIntoFile(makeFilename(key), c); - } - - - /** - * to normalize the filenames - * - * @param key - * @return - */ - String makeFilename(String key){ - String ret=""; - try{ - ret=basedir+URLEncoder.encode(key, "UTF-8")+fileending; - }catch (Exception e) {e.printStackTrace();} - return ret; - } - - /** - * how old is the result - * @return - */ - boolean checkFreshness(){ - if((System.currentTimeMillis()-this.timestamp)<=(daysoffreshness*multiplier)) - //fresh - return true; - else return false; - } - - - /** - * some sparql query - * @param sparql - * @return - */ - boolean validate(String sparql){ - if(this.sparqlquery.equals(sparql)) - //valid - 
return true; - else return false; - } - - /** - * makes a new file if none exists - * @param Filename - */ - public void checkFile(String Filename){ - if(!new File(Filename).exists()){ - try{ - new File(Filename).createNewFile(); - }catch (Exception e) {e.printStackTrace();} - - } - - } - - /** - * internal saving function - * puts a cache object into a file - * - * @param Filename - * @param content - */ - public void putIntoFile(String Filename,SparqlCache content){ - try{ - FileOutputStream fos = new FileOutputStream( Filename , false ); - ObjectOutputStream o = new ObjectOutputStream( fos ); - o.writeObject( content ); - fos.flush(); - fos.close(); - }catch (Exception e) {System.out.println("Not in cache creating: "+Filename);} - } - - /** - * internal retrieval function - * - * @param Filename - * @return one entry object - */ - public SparqlCache readFromFile(String Filename){ - SparqlCache content=null; - try{ - FileInputStream fos = new FileInputStream( Filename ); - ObjectInputStream o = new ObjectInputStream( fos ); - content=(SparqlCache)o.readObject(); - }catch (Exception e) {} - return content; - - } -} Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java 2008-01-14 08:56:17 UTC (rev 369) @@ -1,342 +0,0 @@ -/** - * Copyright (C) 2007, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. 
- * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.util.Collection; -import java.util.LinkedList; -import java.util.Set; - -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.OntologyFormat; -import org.dllearner.core.OntologyFormatUnsupportedException; -import org.dllearner.core.config.BooleanConfigOption; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.IntegerConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.config.StringConfigOption; -import org.dllearner.core.config.StringSetConfigOption; -import org.dllearner.core.dl.KB; -import org.dllearner.parser.KBParser; -import org.dllearner.reasoning.DIGConverter; -import org.dllearner.reasoning.JenaOWLDIGConverter; -import org.dllearner.utilities.Datastructures; - -/** - * Represents a SPARQL Endpoint. - * TODO: Is it necessary to create a class DBpediaSparqlEndpoint? 
- * - * @author Jens Lehmann - * @author Sebastian Knappe - */ -public class SparqlEndpoint extends KnowledgeSource { - - //ConfigOptions - private URL url; - private Set<String> instances; - private URL dumpFile; - private int numberOfRecursions; - private int filterMode; - private Set<String> predList; - private Set<String> objList; - private Set<String> classList; - private String format; - private boolean dumpToFile; - private boolean useLits=false; - - /** - * Holds the results of the calculateSubjects method - */ - private String[] subjects; - - /** - * Holds the results of the calculateTriples method - */ - private String[] triples; - - /** - * Holds the results of the calculateConceptSubjects method - */ - private String[] conceptSubjects; - - /** - * if a method is running this becomes true - */ - private boolean subjectThreadRunning=false; - - private boolean triplesThreadRunning=false; - - private boolean conceptThreadRunning=false; - - /** - * the Thread that is running a method - */ - private Thread subjectThread; - - private Thread triplesThread; - - private Thread conceptThread; - - //received ontology as array, used if format=Array(an element of the - //array consists of the subject, predicate and object separated by '<' - private String[] ontArray; - - //received ontology as KB, the internal format - private KB kb; - - public static String getName() { - return "SPARQL Endpoint"; - } - - /** - * sets the ConfigOptions for this KnowledgeSource - * @return - */ - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); - options.add(new StringSetConfigOption("instances","relevant instances e.g. 
positive and negative examples in a learning problem")); - options.add(new IntegerConfigOption("numberOfRecursions","number of Recursions, the Sparql-Endpoint is asked")); - options.add(new IntegerConfigOption("filterMode","the mode of the SPARQL Filter")); - options.add(new StringSetConfigOption("predList","a predicate list")); - options.add(new StringSetConfigOption("objList","an object list")); - options.add(new StringSetConfigOption("classList","a class list")); - options.add(new StringConfigOption("format", "N-TRIPLES or KB format")); - options.add(new BooleanConfigOption("dumpToFile", "wether Ontology from DBPedia is written to a file or not")); - options.add(new BooleanConfigOption("useLits","use Literals in SPARQL query")); - return options; - } - - /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings({"unchecked"}) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String option = entry.getOptionName(); - if (option.equals("url")) { - String s = (String) entry.getValue(); - try { - url = new URL(s); - } catch (MalformedURLException e) { - throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(),"malformed URL " + s); - } - } else if(option.equals("instances")) { - instances = (Set<String>) entry.getValue(); - } else if(option.equals("numberOfRecursions")){ - numberOfRecursions=(Integer)entry.getValue(); - } else if(option.equals("predList")) { - predList = (Set<String>) entry.getValue(); - } else if(option.equals("objList")) { - objList = (Set<String>) entry.getValue(); - } else if(option.equals("classList")) { - classList = (Set<String>) entry.getValue(); - } else if(option.equals("filterMode")){ - filterMode=(Integer)entry.getValue(); - } else if(option.equals("format")){ - format=(String)entry.getValue(); - } else if(option.equals("dumpToFile")){ - dumpToFile=(Boolean)entry.getValue(); - } else 
if(option.equals("useLits")){ - useLits=(Boolean)entry.getValue(); - } - } - - /* (non-Javadoc) - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - System.out.println("SparqlModul: Collecting Ontology"); - SparqlOntologyCollector oc=new SparqlOntologyCollector(Datastructures.setToArray(instances), numberOfRecursions, filterMode, - Datastructures.setToArray(predList),Datastructures.setToArray( objList),Datastructures.setToArray(classList),format,url,useLits); - - try - { - String ont=oc.collectOntology(); - - if (dumpToFile){ - String filename=System.currentTimeMillis()+".nt"; - String basedir="cache"+File.separator; - try{ - if(!new File(basedir).exists()) - new File(basedir).mkdir(); - - FileWriter fw=new FileWriter(new File(basedir+filename),true); - fw.write(ont); - fw.flush(); - fw.close(); - - dumpFile=(new File(basedir+filename)).toURI().toURL(); - }catch (Exception e) {e.printStackTrace();} - } - if (format.equals("KB")) { - try{ - // kb=KBParser.parseKBFile(new StringReader(ont)); - kb=KBParser.parseKBFile(dumpFile); - } catch(Exception e) {e.printStackTrace();} - } - }catch(IOException e) { - e.printStackTrace(); - } - System.out.println("SparqlModul: ****Finished"); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#toDIG() - */ - @Override - public String toDIG(URI kbURI) { - if (format.equals("N-TRIPLES")) return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); - else return DIGConverter.getDIGString(kb, kbURI).toString(); - } - - /* (non-Javadoc) - * @see org.dllearner.core.KnowledgeSource#export(java.io.File, org.dllearner.core.OntologyFormat) - */ - @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { - // currently no export functions implemented, so we just throw an exception - throw new OntologyFormatUnsupportedException("export", format); - } - - public URL getURL() { - return url; - } - - public 
String[] getOntArray() { - return ontArray; - } - - public void calculateSubjects(String label,int limit) - { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc=new SparqlOntologyCollector(url); - try{ - subjects=oc.getSubjectsFromLabel(label,limit); - }catch (IOException e){ - subjects=new String[1]; - subjects[0]="[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); - SparqlOntologyCollector oc=new SparqlOntologyCollector(url); - try{ - triples=oc.collectTriples(subject); - }catch (IOException e){ - triples=new String[1]; - triples[0]="[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateConceptSubjects(String concept) - { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc=new SparqlOntologyCollector(url); - try{ - conceptSubjects=oc.getSubjectsFromConcept(concept); - }catch (IOException e){ - conceptSubjects=new String[1]; - conceptSubjects[0]="[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public boolean subjectThreadIsRunning() - { - return subjectThreadRunning; - } - - public void setSubjectThreadRunning(boolean bool) - { - subjectThreadRunning=bool; - } - - public boolean triplesThreadIsRunning() - { - return triplesThreadRunning; - } - - public void setTriplesThreadRunning(boolean bool) - { - triplesThreadRunning=bool; - } - - public boolean conceptThreadIsRunning() - { - return conceptThreadRunning; - } - - public void setConceptThreadRunning(boolean bool) - { - conceptThreadRunning=bool; - } - - public String[] getSubjects() - { - return subjects; - } - - public Thread getSubjectThread() { - return subjectThread; - } - - public void setSubjectThread(Thread subjectThread) { - 
this.subjectThread = subjectThread; - } - - public Thread getTriplesThread() { - return triplesThread; - } - - public void setTriplesThread(Thread triplesThread) { - this.triplesThread = triplesThread; - } - - public Thread getConceptThread() { - return conceptThread; - } - - public void setConceptThread(Thread conceptThread) { - this.conceptThread = conceptThread; - } - - public String[] getTriples() - { - return triples; - } - - public String[] getConceptSubjects() - { - return conceptSubjects; - } -} Copied: trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java (from rev 367, trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java 2008-01-14 08:56:17 UTC (rev 369) @@ -0,0 +1,510 @@ +/** + * Copyright (C) 2007, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.kb; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; + +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.OntologyFormat; +import org.dllearner.core.OntologyFormatUnsupportedException; +import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.ConfigEntry; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.config.IntegerConfigOption; +import org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.core.config.StringSetConfigOption; +import org.dllearner.core.config.StringTupleListConfigOption; +import org.dllearner.core.dl.KB; +import org.dllearner.kb.sparql.Manager; +import org.dllearner.kb.sparql.Manipulator; +import org.dllearner.kb.sparql.PredefinedEndpoint; +import org.dllearner.kb.sparql.PredefinedFilter; +import org.dllearner.kb.sparql.SparqlOntologyCollector; +import org.dllearner.kb.sparql.SparqlQueryType; +import org.dllearner.kb.sparql.SpecificSparqlEndpoint; +import org.dllearner.parser.KBParser; +import org.dllearner.reasoning.DIGConverter; +import org.dllearner.reasoning.JenaOWLDIGConverter; +import org.dllearner.utilities.StringTuple; + +/** + * Represents a SPARQL Endpoint. 
+ * + * @author Jens Lehmann + * @author Sebastian Knappe + * @author Sebastian Hellmann + */ +public class SparqlEndpoint extends KnowledgeSource { + + // ConfigOptions + private URL url; + String host; + private Set<String> instances=new HashSet<String>();; + private URL dumpFile; + private int recursionDepth = 1; + private int predefinedFilter = 0; + private int predefinedEndpoint = 0; + private Set<String> predList=new HashSet<String>(); + private Set<String> objList=new HashSet<String>(); + // private Set<String> classList; + private String format = "N-TRIPLES"; + private boolean dumpToFile = true; + private boolean useLits = false; + private boolean getAllSuperClasses = true; + private boolean closeAfterRecursion = true; + private int breakSuperClassRetrievalAfter = 200; + + private boolean learnDomain = false; + private boolean learnRange = false; + private int numberOfInstancesUsedForRoleLearning=40; + private String role=""; + private String blankNodeIdentifier = "bnode"; + + LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); + + /** + * Holds the results of the calculateSubjects method + */ + private String[] subjects; + + /** + * Holds the results of the calculateTriples method + */ + private String[] triples; + + /** + * Holds the results of the calculateConceptSubjects method + */ + private String[] conceptSubjects; + + /** + * if a method is running this becomes true + */ + private boolean subjectThreadRunning = false; + + private boolean triplesThreadRunning = false; + + private boolean conceptThreadRunning = false; + + /** + * the Thread that is running a method + */ + private Thread subjectThread; + + private Thread triplesThread; + + private Thread conceptThread; + + // received ontology as array, used if format=Array(an element of the + // array consists of the subject, predicate 
and object separated by '<' + private String[] ontArray; + + // received ontology as KB, the internal format + private KB kb; + + public static String getName() { + return "SPARQL Endpoint Restructured"; + } + + /** + * sets the ConfigOptions for this KnowledgeSource + * + * @return + */ + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); + options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); + options.add(new StringSetConfigOption("instances", + "relevant instances e.g. positive and negative examples in a learning problem")); + options.add(new IntegerConfigOption("recursionDepth", + "recursion depth of KB fragment selection", 2)); + options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); + options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); + + options.add(new StringSetConfigOption("predList", "list of all ignored roles")); + options.add(new StringSetConfigOption("objList", "list of all ignored objects")); + options.add(new StringSetConfigOption("classList", "list of all ignored classes")); + options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); + options.add(new BooleanConfigOption("dumpToFile", + "Specifies whether the extracted ontology is written to a file or not.", true)); + options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); + options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); + + options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); + options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); + options.add(new StringConfigOption("role", "role to learn Domain/Range from")); + 
options.add(new StringConfigOption("blankNodeIdentifier", + "used to identify blanknodes in Tripels")); + + options.add(new StringTupleListConfigOption("example", "example")); + options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); + options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); + options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); + options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); + options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); + + + + return options; + } + + /* + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + @SuppressWarnings( { "unchecked" }) + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + String option = entry.getOptionName(); + if (option.equals("url")) { + String s = (String) entry.getValue(); + try { + url = new URL(s); + } catch (MalformedURLException e) { + throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), + "malformed URL " + s); + } + } else if (option.equals("host")) { + host = (String) entry.getValue(); + } else if (option.equals("instances")) { + instances = (Set<String>) entry.getValue(); + } else if (option.equals("recursionDepth")) { + recursionDepth = (Integer) entry.getValue(); + } else if (option.equals("predList")) { + predList = (Set<String>) entry.getValue(); + } else if (option.equals("objList")) { + objList = (Set<String>) entry.getValue(); + //} else if (option.equals("classList")) { + // classList = (Set<String>) entry.getValue(); + } else if (option.equals("predefinedEndpoint")) { + predefinedEndpoint = (Integer) entry.getValue(); + } else if (option.equals("predefinedFilter")) { + predefinedFilter = (Integer) entry.getValue(); + } else if 
(option.equals("format")) { + format = (String) entry.getValue(); + } else if (option.equals("dumpToFile")) { + dumpToFile = (Boolean) entry.getValue(); + } else if (option.equals("useLits")) { + useLits = (Boolean) entry.getValue(); + } else if (option.equals("getAllSuperClasses")) { + getAllSuperClasses = (Boolean) entry.getValue(); + } else if (option.equals("learnDomain")) { + learnDomain = (Boolean) entry.getValue(); + }else if (option.equals("learnRange")) { + learnRange = (Boolean) entry.getValue(); + } else if (option.equals("role")) { + role = (String) entry.getValue(); + } else if (option.equals("blankNodeIdentifier")) { + blankNodeIdentifier = (String) entry.getValue(); + } else if (option.equals("example")) { + //System.out.println(entry.getValue()); + }else if (option.equals("replacePredicate")) { + replacePredicate = (LinkedList)entry.getValue(); + }else if (option.equals("replaceObject")) { + replaceObject = (LinkedList)entry.getValue(); + }else if (option.equals("breakSuperClassRetrievalAfter")) { + breakSuperClassRetrievalAfter = (Integer) entry.getValue(); + }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { + numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); + }else if (option.equals("closeAfterRecursion")) { + closeAfterRecursion = (Boolean) entry.getValue(); + } + + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() { + System.out.println("SparqlModul: Collecting Ontology"); + // SparqlOntologyCollector oc= + // new SparqlOntologyCollector(Datastructures.setToArray(instances), + // numberOfRecursions, filterMode, + // Datastructures.setToArray(predList),Datastructures.setToArray( + // objList),Datastructures.setToArray(classList),format,url,useLits); + Manager m = new Manager(); + SpecificSparqlEndpoint sse = null; + SparqlQueryType sqt = null; + // get Options for Manipulator + Manipulator man = new 
Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); + HashMap<String, String> parameters = new HashMap<String, String>(); + parameters.put("default-graph-uri", "http://dbpedia.org"); + parameters.put("format", "application/sparql-results.xml"); + + // get Options for endpoints + if (predefinedEndpoint >= 1) { + sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); + } else { + sse = new SpecificSparqlEndpoint(url, host, parameters); + } + + // get Options for Filters + + if (predefinedFilter >= 1) { + sqt = PredefinedFilter.getFilter(predefinedFilter); + + } else { + sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); + + } + // give everything to the manager + m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); + try { + String ont = ""; + //System.out.println(learnDomain); + // used to learn a domain of a role + if (learnDomain || learnRange) { + Set<String> pos=new HashSet<String>(); + Set<String> neg=new HashSet<String>(); + if(learnDomain){ + pos = m.getDomainInstancesForRole(role); + neg = m.getRangeInstancesForRole(role); + }else if(learnRange){ + neg = m.getDomainInstancesForRole(role); + pos = m.getRangeInstancesForRole(role); + } + //choose 30 + + + Set<String> tmp=new HashSet<String>(); + for(String one:pos){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + pos=tmp; + System.out.println("Instances used: "+pos.size()); + + tmp=new HashSet<String>(); + for(String one:neg){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + neg=tmp; + + instances=new HashSet<String>(); + instances.addAll(pos); + + instances.addAll(neg); + + for(String one:pos){ + System.out.println("+\""+one+"\""); + } + for(String one:neg){ + System.out.println("-\""+one+"\""); + } + + /*Random r= new Random(); + + + Object[] arr=instances.toArray(); + while(instances.size()>=30){ + + }*/ + // add the role to the filter(a 
solution is always EXISTS + // role.TOP) + m.addPredicateFilter(role); + //System.out.println(instances); + // THIS is a workaround + + } + // the actual extraction is started here + ont = m.extract(instances); + System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); + System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); + + System.out.println("Finished collecting Fragment"); + + if (dumpToFile) { + String filename = System.currentTimeMillis() + ".nt"; + String basedir = "cache" + File.separator; + try { + if (!new File(basedir).exists()) + new File(basedir).mkdir(); + + FileWriter fw = new FileWriter(new File(basedir + filename), true); + fw.write(ont); + fw.flush(); + fw.close(); + + dumpFile = (new File(basedir + filename)).toURI().toURL(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (format.equals("KB")) { + try { + //kb = KBParser.parseKBFile(new StringReader(ont)); + kb=KBParser.parseKBFile(dumpFile); + } catch (Exception e) { + e.printStackTrace(); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + System.out.println("SparqlModul: ****Finished"); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#toDIG() + */ + @Override + public String toDIG(URI kbURI) { + if (format.equals("N-TRIPLES")) + return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); + else + return DIGConverter.getDIGString(kb, kbURI).toString(); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#export(java.io.File, + * org.dllearner.core.OntologyFormat) + */ + @Override + public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { + // currently no export functions implemented, so we just throw an + // exception + throw new OntologyFormatUnsupportedException("export", format); + } + + public URL getURL() { + return url; + } + + public 
String[] getOntArray() { + return ontArray; + } + + public void calculateSubjects(String label, int limit) { + System.out.println("SparqlModul: Collecting Subjects"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + subjects = oc.getSubjectsFromLabel(label, limit); + } catch (IOException e) { + subjects = new String[1]; + subjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public void calculateTriples(String subject) { + System.out.println("SparqlModul: Collecting Triples"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + triples = oc.collectTriples(subject); + } catch (IOException e) { + triples = new String[1]; + triples[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public void calculateConceptSubjects(String concept) { + System.out.println("SparqlModul: Collecting Subjects"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + conceptSubjects = oc.getSubjectsFromConcept(concept); + } catch (IOException e) { + conceptSubjects = new String[1]; + conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public boolean subjectThreadIsRunning() { + return subjectThreadRunning; + } + + public void setSubjectThreadRunning(boolean bool) { + subjectThreadRunning = bool; + } + + public boolean triplesThreadIsRunning() { + return triplesThreadRunning; + } + + public void setTriplesThreadRunning(boolean bool) { + triplesThreadRunning = bool; + } + + public boolean conceptThreadIsRunning() { + return conceptThreadRunning; + } + + public void setConceptThreadRunning(boolean bool) { + conceptThreadRunning = bool; + } + + public String[] getSubjects() { + return subjects; + } + + public Thread getSubjectThread() { + return subjectThread; + } + + public void setSubjectThread(Thread 
subjectThread) { + this.subjectThread = subjectThread; + } + + public Thread getTriplesThread() { + return triplesThread; + } + + public void setTriplesThread(Thread triplesThread) { + this.triplesThread = triplesThread; + } + + public Thread getConceptThread() { + return conceptThread; + } + + public void setConceptThread(Thread conceptThread) { + this.conceptThread = conceptThread; + } + + public String[] getTriples() { + return triples; + } + + public String[] getConceptSubjects() { + return conceptSubjects; + } +} Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlEndpointRestructured.java 2008-01-14 08:56:17 UTC (rev 369) @@ -1,509 +0,0 @@ -/** - * Copyright (C) 2007, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- * - */ -package org.dllearner.kb; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.Set; - -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.OntologyFormat; -import org.dllearner.core.OntologyFormatUnsupportedException; -import org.dllearner.core.config.BooleanConfigOption; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.IntegerConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.config.StringConfigOption; -import org.dllearner.core.config.StringSetConfigOption; -import org.dllearner.core.config.StringTupleListConfigOption; -import org.dllearner.core.dl.KB; -import org.dllearner.kb.sparql.Manager; -import org.dllearner.kb.sparql.Manipulator; -import org.dllearner.kb.sparql.PredefinedEndpoint; -import org.dllearner.kb.sparql.PredefinedFilter; -import org.dllearner.kb.sparql.SparqlQueryType; -import org.dllearner.kb.sparql.SpecificSparqlEndpoint; -import org.dllearner.parser.KBParser; -import org.dllearner.reasoning.DIGConverter; -import org.dllearner.reasoning.JenaOWLDIGConverter; -import org.dllearner.utilities.StringTuple; - -/** - * Represents a SPARQL Endpoint. 
- * - * @author Jens Lehmann - * @author Sebastian Knappe - * @author Sebastian Hellmann - */ -public class SparqlEndpointRestructured extends KnowledgeSource { - - // ConfigOptions - private URL url; - String host; - private Set<String> instances=new HashSet<String>();; - private URL dumpFile; - private int recursionDepth = 1; - private int predefinedFilter = 0; - private int predefinedEndpoint = 0; - private Set<String> predList=new HashSet<String>(); - private Set<String> objList=new HashSet<String>(); - private Set<String> classList; - private String format = "N-TRIPLES"; - private boolean dumpToFile = true; - private boolean useLits = false; - private boolean getAllSuperClasses = true; - private boolean closeAfterRecursion = true; - private int breakSuperClassRetrievalAfter = 200; - - private boolean learnDomain = false; - private boolean learnRange = false; - private int numberOfInstancesUsedForRoleLearning=40; - private String role=""; - private String blankNodeIdentifier = "bnode"; - - LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); - - /** - * Holds the results of the calculateSubjects method - */ - private String[] subjects; - - /** - * Holds the results of the calculateTriples method - */ - private String[] triples; - - /** - * Holds the results of the calculateConceptSubjects method - */ - private String[] conceptSubjects; - - /** - * if a method is running this becomes true - */ - private boolean subjectThreadRunning = false; - - private boolean triplesThreadRunning = false; - - private boolean conceptThreadRunning = false; - - /** - * the Thread that is running a method - */ - private Thread subjectThread; - - private Thread triplesThread; - - private Thread conceptThread; - - // received ontology as array, used if format=Array(an element of the - // array consists of the subject, 
predicate and object separated by '<' - private String[] ontArray; - - // received ontology as KB, the internal format - private KB kb; - - public static String getName() { - return "SPARQL Endpoint Restructured"; - } - - /** - * sets the ConfigOptions for this KnowledgeSource - * - * @return - */ - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); - options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); - options.add(new StringSetConfigOption("instances", - "relevant instances e.g. positive and negative examples in a learning problem")); - options.add(new IntegerConfigOption("recursionDepth", - "recursion depth of KB fragment selection", 2)); - options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); - options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); - - options.add(new StringSetConfigOption("predList", "list of all ignored roles")); - options.add(new StringSetConfigOption("objList", "list of all ignored objects")); - options.add(new StringSetConfigOption("classList", "list of all ignored classes")); - options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); - options.add(new BooleanConfigOption("dumpToFile", - "Specifies whether the extracted ontology is written to a file or not.", true)); - options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); - options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); - - options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); - options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); - options.add(new StringConfigOption("role", "role to learn Domain/Range 
from")); - options.add(new StringConfigOption("blankNodeIdentifier", - "used to identify blanknodes in Tripels")); - - options.add(new StringTupleListConfigOption("example", "example")); - options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); - options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); - options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); - options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); - options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); - - - - return options; - } - - /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings( { "unchecked" }) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String option = entry.getOptionName(); - if (option.equals("url")) { - String s = (String) entry.getValue(); - try { - url = new URL(s); - } catch (MalformedURLException e) { - throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), - "malformed URL " + s); - } - } else if (option.equals("host")) { - host = (String) entry.getValue(); - } else if (option.equals("instances")) { - instances = (Set<String>) entry.getValue(); - } else if (option.equals("recursionDepth")) { - recursionDepth = (Integer) entry.getValue(); - } else if (option.equals("predList")) { - predList = (Set<String>) entry.getValue(); - } else if (option.equals("objList")) { - objList = (Set<String>) entry.getValue(); - } else if (option.equals("classList")) { - classList = (Set<String>) entry.getValue(); - } else if (option.equals("predefinedEndpoint")) { - predefinedEndpoint = (Integer) entry.getValue(); - } else if (option.equals("predefinedFilter")) { - predefinedFilter = (Integer) entry.getValue(); - } 
else if (option.equals("format")) { - format = (String) entry.getValue(); - } else if (option.equals("dumpToFile")) { - dumpToFile = (Boolean) entry.getValue(); - } else if (option.equals("useLits")) { - useLits = (Boolean) entry.getValue(); - } else if (option.equals("getAllSuperClasses")) { - getAllSuperClasses = (Boolean) entry.getValue(); - } else if (option.equals("learnDomain")) { - learnDomain = (Boolean) entry.getValue(); - }else if (option.equals("learnRange")) { - learnRange = (Boolean) entry.getValue(); - } else if (option.equals("role")) { - role = (String) entry.getValue(); - } else if (option.equals("blankNodeIdentifier")) { - blankNodeIdentifier = (String) entry.getValue(); - } else if (option.equals("example")) { - //System.out.println(entry.getValue()); - }else if (option.equals("replacePredicate")) { - replacePredicate = (LinkedList)entry.getValue(); - }else if (option.equals("replaceObject")) { - replaceObject = (LinkedList)entry.getValue(); - }else if (option.equals("breakSuperClassRetrievalAfter")) { - breakSuperClassRetrievalAfter = (Integer) entry.getValue(); - }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { - numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); - }else if (option.equals("closeAfterRecursion")) { - closeAfterRecursion = (Boolean) entry.getValue(); - } - - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - System.out.println("SparqlModul: Collecting Ontology"); - // SparqlOntologyCollector oc= - // new SparqlOntologyCollector(Datastructures.setToArray(instances), - // numberOfRecursions, filterMode, - // Datastructures.setToArray(predList),Datastructures.setToArray( - // objList),Datastructures.setToArray(classList),format,url,useLits); - Manager m = new Manager(); - SpecificSparqlEndpoint sse = null; - SparqlQueryType sqt = null; - // get Options for Manipulator - Manipulator man = new 
Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); - HashMap<String, String> parameters = new HashMap<String, String>(); - parameters.put("default-graph-uri", "http://dbpedia.org"); - parameters.put("format", "application/sparql-results.xml"); - - // get Options for endpoints - if (predefinedEndpoint >= 1) { - sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); - } else { - sse = new SpecificSparqlEndpoint(url, host, parameters); - } - - // get Options for Filters - - if (predefinedFilter >= 1) { - sqt = PredefinedFilter.getFilter(predefinedFilter); - - } else { - sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); - - } - // give everything to the manager - m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); - try { - String ont = ""; - //System.out.println(learnDomain); - // used to learn a domain of a role - if (learnDomain || learnRange) { - Set<String> pos=new HashSet<String>(); - Set<String> neg=new HashSet<String>(); - if(learnDomain){ - pos = m.getDomainInstancesForRole(role); - neg = m.getRangeInstancesForRole(role); - }else if(learnRange){ - neg = m.getDomainInstancesForRole(role); - pos = m.getRangeInstancesForRole(role); - } - //choose 30 - - - Set<String> tmp=new HashSet<String>(); - for(String one:pos){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - pos=tmp; - System.out.println("Instances used: "+pos.size()); - - tmp=new HashSet<String>(); - for(String one:neg){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - neg=tmp; - - instances=new HashSet<String>(); - instances.addAll(pos); - - instances.addAll(neg); - - for(String one:pos){ - System.out.println("+\""+one+"\""); - } - for(String one:neg){ - System.out.println("-\""+one+"\""); - } - - /*Random r= new Random(); - - - Object[] arr=instances.toArray(); - while(instances.size()>=30){ - - }*/ - // add the role to the filter(a 
solution is always EXISTS - // role.TOP) - m.addPredicateFilter(role); - //System.out.println(instances); - // THIS is a workaround - - } - // the actual extraction is started here - ont = m.extract(instances); - System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); - System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); - - System.out.println("Finished collecting Fragment"); - - if (dumpToFile) { - String filename = System.currentTimeMillis() + ".nt"; - String basedir = "cache" + File.separator; - try { - if (!new File(basedir).exists()) - new File(basedir).mkdir(); - - FileWriter fw = new FileWriter(new File(basedir + filename), true); - fw.write(ont); - fw.flush(); - fw.close(); - - dumpFile = (new File(basedir + filename)).toURI().toURL(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (format.equals("KB")) { - try { - //kb = KBParser.parseKBFile(new StringReader(ont)); - kb=KBParser.parseKBFile(dumpFile); - } catch (Exception e) { - e.printStackTrace(); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - System.out.println("SparqlModul: ****Finished"); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#toDIG() - */ - @Override - public String toDIG(URI kbURI) { - if (format.equals("N-TRIPLES")) - return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); - else - return DIGConverter.getDIGString(kb, kbURI).toString(); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#export(java.io.File, - * org.dllearner.core.OntologyFormat) - */ - @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { - // currently no export functions implemented, so we just throw an - // exception - throw new OntologyFormatUnsupportedException("export", format); - } - - public URL getURL() { - return url; - } - - public 
String[] getOntArray() { - return ontArray; - } - - public void calculateSubjects(String label, int limit) { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - subjects = oc.getSubjectsFromLabel(label, limit); - } catch (IOException e) { - subjects = new String[1]; - subjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - triples = oc.collectTriples(subject); - } catch (IOException e) { - triples = new String[1]; - triples[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateConceptSubjects(String concept) { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - conceptSubjects = oc.getSubjectsFromConcept(concept); - } catch (IOException e) { - conceptSubjects = new String[1]; - conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public boolean subjectThreadIsRunning() { - return subjectThreadRunning; - } - - public void setSubjectThreadRunning(boolean bool) { - subjectThreadRunning = bool; - } - - public boolean triplesThreadIsRunning() { - return triplesThreadRunning; - } - - public void setTriplesThreadRunning(boolean bool) { - triplesThreadRunning = bool; - } - - public boolean conceptThreadIsRunning() { - return conceptThreadRunning; - } - - public void setConceptThreadRunning(boolean bool) { - conceptThreadRunning = bool; - } - - public String[] getSubjects() { - return subjects; - } - - public Thread getSubjectThread() { - return subjectThread; - } - - public void setSubjectThread(Thread 
subjectThread) { - this.subjectThread = subjectThread; - } - - public Thread getTriplesThread() { - return triplesThread; - } - - public void setTriplesThread(Thread triplesThread) { - this.triplesThread = triplesThread; - } - - public Thread getConceptThread() { - return conceptThread; - } - - public void setConceptThread(Thread conceptThread) { - this.conceptThread = conceptThread; - } - - public String[] getTriples() { - return triples; - } - - public String[] getConceptSubjects() { - return conceptSubjects; - } -} Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlFilter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlFilter.java 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlFilter.java 2008-01-14 08:56:17 UTC (rev 369) @@ -1,119 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- * - */ -package org.dllearner.kb; - -/** - * - * - * encapsulates all the options - * see the documentation for more help - * - * @author Sebastian Hellmann - * @author Sebastian Knappe - * - */ -public class SparqlFilter { - public int mode=0; - // 0 yago, 1 only cat, 2 skos+cat - String[] PredFilter=null; - String[] ObjFilter=null; - boolean useLiterals=false; - - - String[] yagoPredFilterDefault={ - "http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - String[] yagoObjFilterDefault={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/resource/Category", - "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - - String[] onlyCatPredFilterDefault={ - "http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - String[] onlyCatObjFilterDefault={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - - String[] skosPredFilterDefault={ - "http://www.w3.org/2004/02/skos/core#narrower", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference" }; - String[] skosObjFilterDefault={ - "http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Template", - 
"http://upload.wikimedia.org/wikipedia/commons"}; - - public SparqlFilter(int mode, String[] pred, String[] obj) { - if (mode==-1 && (pred==null || obj==null)) - {mode=0;} - this.mode=mode; - - switch (mode){ - case 0: //yago - ObjFilter=yagoObjFilterDefault; - PredFilter=yagoPredFilterDefault; - break; - case 1: // only Categories - ObjFilter=onlyCatObjFilterDefault; - PredFilter=onlyCatPredFilterDefault; - break; - case 2: // there are some other changes to, which are made directly in other functions - ObjFilter=skosObjFilterDefault; - PredFilter=skosPredFilterDefault; - break; - default: - ObjFilter=obj; - PredFilter=pred; - break; - } - } - - public SparqlFilter(int mode, String[] pred, String[] obj,boolean uselits) throws Exception{ - this(mode,pred,obj); - this.useLiterals=uselits; - } - - public String[] getObjFilter(){ - return this.ObjFilter; - } - - public String[] getPredFilter(){ - return this.PredFilter; - } -} \ No newline at end of file Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlOntologyCollector.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlOntologyCollector.java 2008-01-13 10:22:16 UTC (rev 368) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlOntologyCollector.java 2008-01-14 08:56:17 UTC (rev 369) @@ -1,509 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.OutputStreamWriter; -import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLEncoder; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Vector; - - -/** - * This class collects the ontology from dbpedia, - * everything is saved in hashsets, so the doublettes are taken care of - * - * - * @author Sebastia... [truncated message content] |
From: <jen...@us...> - 2008-01-14 11:54:59
|
Revision: 370 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=370&view=rev Author: jenslehmann Date: 2008-01-14 03:54:55 -0800 (Mon, 14 Jan 2008) Log Message: ----------- - continued merge of SPARQL components - added example for SPARQL queries using Jena in org.dllearner.kb.sparql.SparqlQuery - updated and added Jena libraries @developers: Please update your classpath by adding the new jars in lib! Modified Paths: -------------- trunk/lib/jena/iri.jar trunk/lib/jena/jena.jar trunk/src/dl-learner/org/dllearner/cli/Start.java trunk/src/dl-learner/org/dllearner/server/ClientState.java trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java Added Paths: ----------- trunk/lib/jena/arq.jar trunk/lib/jena/commons-logging-1.1.jar trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java Removed Paths: ------------- trunk/lib/jena/commons-logging.jar trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java Added: trunk/lib/jena/arq.jar =================================================================== (Binary files differ) Property changes on: trunk/lib/jena/arq.jar ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/lib/jena/commons-logging-1.1.jar =================================================================== (Binary files differ) Property changes on: trunk/lib/jena/commons-logging-1.1.jar ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Deleted: trunk/lib/jena/commons-logging.jar =================================================================== (Binary files differ) Modified: trunk/lib/jena/iri.jar =================================================================== (Binary files differ) Modified: trunk/lib/jena/jena.jar =================================================================== (Binary files differ) Modified: 
trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-14 08:56:17 UTC (rev 369) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-14 11:54:55 UTC (rev 370) @@ -60,7 +60,7 @@ import org.dllearner.core.dl.Individual; import org.dllearner.kb.KBFile; import org.dllearner.kb.OWLFile; -import org.dllearner.kb.SparqlEndpoint; +import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.PosNegDefinitionLP; import org.dllearner.learningproblems.PosNegInclusionLP; import org.dllearner.learningproblems.PosOnlyDefinitionLP; Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java 2008-01-14 08:56:17 UTC (rev 369) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java 2008-01-14 11:54:55 UTC (rev 370) @@ -1,510 +0,0 @@ -/** - * Copyright (C) 2007, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. 
- * - */ -package org.dllearner.kb; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.Set; - -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.OntologyFormat; -import org.dllearner.core.OntologyFormatUnsupportedException; -import org.dllearner.core.config.BooleanConfigOption; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.IntegerConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.config.StringConfigOption; -import org.dllearner.core.config.StringSetConfigOption; -import org.dllearner.core.config.StringTupleListConfigOption; -import org.dllearner.core.dl.KB; -import org.dllearner.kb.sparql.Manager; -import org.dllearner.kb.sparql.Manipulator; -import org.dllearner.kb.sparql.PredefinedEndpoint; -import org.dllearner.kb.sparql.PredefinedFilter; -import org.dllearner.kb.sparql.SparqlOntologyCollector; -import org.dllearner.kb.sparql.SparqlQueryType; -import org.dllearner.kb.sparql.SpecificSparqlEndpoint; -import org.dllearner.parser.KBParser; -import org.dllearner.reasoning.DIGConverter; -import org.dllearner.reasoning.JenaOWLDIGConverter; -import org.dllearner.utilities.StringTuple; - -/** - * Represents a SPARQL Endpoint. 
- * - * @author Jens Lehmann - * @author Sebastian Knappe - * @author Sebastian Hellmann - */ -public class SparqlEndpoint extends KnowledgeSource { - - // ConfigOptions - private URL url; - String host; - private Set<String> instances=new HashSet<String>();; - private URL dumpFile; - private int recursionDepth = 1; - private int predefinedFilter = 0; - private int predefinedEndpoint = 0; - private Set<String> predList=new HashSet<String>(); - private Set<String> objList=new HashSet<String>(); - // private Set<String> classList; - private String format = "N-TRIPLES"; - private boolean dumpToFile = true; - private boolean useLits = false; - private boolean getAllSuperClasses = true; - private boolean closeAfterRecursion = true; - private int breakSuperClassRetrievalAfter = 200; - - private boolean learnDomain = false; - private boolean learnRange = false; - private int numberOfInstancesUsedForRoleLearning=40; - private String role=""; - private String blankNodeIdentifier = "bnode"; - - LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); - - /** - * Holds the results of the calculateSubjects method - */ - private String[] subjects; - - /** - * Holds the results of the calculateTriples method - */ - private String[] triples; - - /** - * Holds the results of the calculateConceptSubjects method - */ - private String[] conceptSubjects; - - /** - * if a method is running this becomes true - */ - private boolean subjectThreadRunning = false; - - private boolean triplesThreadRunning = false; - - private boolean conceptThreadRunning = false; - - /** - * the Thread that is running a method - */ - private Thread subjectThread; - - private Thread triplesThread; - - private Thread conceptThread; - - // received ontology as array, used if format=Array(an element of the - // array consists of the subject, predicate 
and object separated by '<' - private String[] ontArray; - - // received ontology as KB, the internal format - private KB kb; - - public static String getName() { - return "SPARQL Endpoint Restructured"; - } - - /** - * sets the ConfigOptions for this KnowledgeSource - * - * @return - */ - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); - options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); - options.add(new StringSetConfigOption("instances", - "relevant instances e.g. positive and negative examples in a learning problem")); - options.add(new IntegerConfigOption("recursionDepth", - "recursion depth of KB fragment selection", 2)); - options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); - options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); - - options.add(new StringSetConfigOption("predList", "list of all ignored roles")); - options.add(new StringSetConfigOption("objList", "list of all ignored objects")); - options.add(new StringSetConfigOption("classList", "list of all ignored classes")); - options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); - options.add(new BooleanConfigOption("dumpToFile", - "Specifies whether the extracted ontology is written to a file or not.", true)); - options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); - options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); - - options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); - options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); - options.add(new StringConfigOption("role", "role to learn Domain/Range from")); - 
options.add(new StringConfigOption("blankNodeIdentifier", - "used to identify blanknodes in Tripels")); - - options.add(new StringTupleListConfigOption("example", "example")); - options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); - options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); - options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); - options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); - options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); - - - - return options; - } - - /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings( { "unchecked" }) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String option = entry.getOptionName(); - if (option.equals("url")) { - String s = (String) entry.getValue(); - try { - url = new URL(s); - } catch (MalformedURLException e) { - throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), - "malformed URL " + s); - } - } else if (option.equals("host")) { - host = (String) entry.getValue(); - } else if (option.equals("instances")) { - instances = (Set<String>) entry.getValue(); - } else if (option.equals("recursionDepth")) { - recursionDepth = (Integer) entry.getValue(); - } else if (option.equals("predList")) { - predList = (Set<String>) entry.getValue(); - } else if (option.equals("objList")) { - objList = (Set<String>) entry.getValue(); - //} else if (option.equals("classList")) { - // classList = (Set<String>) entry.getValue(); - } else if (option.equals("predefinedEndpoint")) { - predefinedEndpoint = (Integer) entry.getValue(); - } else if (option.equals("predefinedFilter")) { - predefinedFilter = (Integer) entry.getValue(); - } else if 
(option.equals("format")) { - format = (String) entry.getValue(); - } else if (option.equals("dumpToFile")) { - dumpToFile = (Boolean) entry.getValue(); - } else if (option.equals("useLits")) { - useLits = (Boolean) entry.getValue(); - } else if (option.equals("getAllSuperClasses")) { - getAllSuperClasses = (Boolean) entry.getValue(); - } else if (option.equals("learnDomain")) { - learnDomain = (Boolean) entry.getValue(); - }else if (option.equals("learnRange")) { - learnRange = (Boolean) entry.getValue(); - } else if (option.equals("role")) { - role = (String) entry.getValue(); - } else if (option.equals("blankNodeIdentifier")) { - blankNodeIdentifier = (String) entry.getValue(); - } else if (option.equals("example")) { - //System.out.println(entry.getValue()); - }else if (option.equals("replacePredicate")) { - replacePredicate = (LinkedList)entry.getValue(); - }else if (option.equals("replaceObject")) { - replaceObject = (LinkedList)entry.getValue(); - }else if (option.equals("breakSuperClassRetrievalAfter")) { - breakSuperClassRetrievalAfter = (Integer) entry.getValue(); - }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { - numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); - }else if (option.equals("closeAfterRecursion")) { - closeAfterRecursion = (Boolean) entry.getValue(); - } - - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - System.out.println("SparqlModul: Collecting Ontology"); - // SparqlOntologyCollector oc= - // new SparqlOntologyCollector(Datastructures.setToArray(instances), - // numberOfRecursions, filterMode, - // Datastructures.setToArray(predList),Datastructures.setToArray( - // objList),Datastructures.setToArray(classList),format,url,useLits); - Manager m = new Manager(); - SpecificSparqlEndpoint sse = null; - SparqlQueryType sqt = null; - // get Options for Manipulator - Manipulator man = new 
Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); - HashMap<String, String> parameters = new HashMap<String, String>(); - parameters.put("default-graph-uri", "http://dbpedia.org"); - parameters.put("format", "application/sparql-results.xml"); - - // get Options for endpoints - if (predefinedEndpoint >= 1) { - sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); - } else { - sse = new SpecificSparqlEndpoint(url, host, parameters); - } - - // get Options for Filters - - if (predefinedFilter >= 1) { - sqt = PredefinedFilter.getFilter(predefinedFilter); - - } else { - sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); - - } - // give everything to the manager - m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); - try { - String ont = ""; - //System.out.println(learnDomain); - // used to learn a domain of a role - if (learnDomain || learnRange) { - Set<String> pos=new HashSet<String>(); - Set<String> neg=new HashSet<String>(); - if(learnDomain){ - pos = m.getDomainInstancesForRole(role); - neg = m.getRangeInstancesForRole(role); - }else if(learnRange){ - neg = m.getDomainInstancesForRole(role); - pos = m.getRangeInstancesForRole(role); - } - //choose 30 - - - Set<String> tmp=new HashSet<String>(); - for(String one:pos){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - pos=tmp; - System.out.println("Instances used: "+pos.size()); - - tmp=new HashSet<String>(); - for(String one:neg){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - neg=tmp; - - instances=new HashSet<String>(); - instances.addAll(pos); - - instances.addAll(neg); - - for(String one:pos){ - System.out.println("+\""+one+"\""); - } - for(String one:neg){ - System.out.println("-\""+one+"\""); - } - - /*Random r= new Random(); - - - Object[] arr=instances.toArray(); - while(instances.size()>=30){ - - }*/ - // add the role to the filter(a 
solution is always EXISTS - // role.TOP) - m.addPredicateFilter(role); - //System.out.println(instances); - // THIS is a workaround - - } - // the actual extraction is started here - ont = m.extract(instances); - System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); - System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); - - System.out.println("Finished collecting Fragment"); - - if (dumpToFile) { - String filename = System.currentTimeMillis() + ".nt"; - String basedir = "cache" + File.separator; - try { - if (!new File(basedir).exists()) - new File(basedir).mkdir(); - - FileWriter fw = new FileWriter(new File(basedir + filename), true); - fw.write(ont); - fw.flush(); - fw.close(); - - dumpFile = (new File(basedir + filename)).toURI().toURL(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (format.equals("KB")) { - try { - //kb = KBParser.parseKBFile(new StringReader(ont)); - kb=KBParser.parseKBFile(dumpFile); - } catch (Exception e) { - e.printStackTrace(); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - System.out.println("SparqlModul: ****Finished"); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#toDIG() - */ - @Override - public String toDIG(URI kbURI) { - if (format.equals("N-TRIPLES")) - return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); - else - return DIGConverter.getDIGString(kb, kbURI).toString(); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#export(java.io.File, - * org.dllearner.core.OntologyFormat) - */ - @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { - // currently no export functions implemented, so we just throw an - // exception - throw new OntologyFormatUnsupportedException("export", format); - } - - public URL getURL() { - return url; - } - - public 
String[] getOntArray() { - return ontArray; - } - - public void calculateSubjects(String label, int limit) { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - subjects = oc.getSubjectsFromLabel(label, limit); - } catch (IOException e) { - subjects = new String[1]; - subjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - triples = oc.collectTriples(subject); - } catch (IOException e) { - triples = new String[1]; - triples[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public void calculateConceptSubjects(String concept) { - System.out.println("SparqlModul: Collecting Subjects"); - SparqlOntologyCollector oc = new SparqlOntologyCollector(url); - try { - conceptSubjects = oc.getSubjectsFromConcept(concept); - } catch (IOException e) { - conceptSubjects = new String[1]; - conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public boolean subjectThreadIsRunning() { - return subjectThreadRunning; - } - - public void setSubjectThreadRunning(boolean bool) { - subjectThreadRunning = bool; - } - - public boolean triplesThreadIsRunning() { - return triplesThreadRunning; - } - - public void setTriplesThreadRunning(boolean bool) { - triplesThreadRunning = bool; - } - - public boolean conceptThreadIsRunning() { - return conceptThreadRunning; - } - - public void setConceptThreadRunning(boolean bool) { - conceptThreadRunning = bool; - } - - public String[] getSubjects() { - return subjects; - } - - public Thread getSubjectThread() { - return subjectThread; - } - - public void setSubjectThread(Thread 
subjectThread) { - this.subjectThread = subjectThread; - } - - public Thread getTriplesThread() { - return triplesThread; - } - - public void setTriplesThread(Thread triplesThread) { - this.triplesThread = triplesThread; - } - - public Thread getConceptThread() { - return conceptThread; - } - - public void setConceptThread(Thread conceptThread) { - this.conceptThread = conceptThread; - } - - public String[] getTriples() { - return triples; - } - - public String[] getConceptSubjects() { - return conceptSubjects; - } -} Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java (from rev 369, trunk/src/dl-learner/org/dllearner/kb/SparqlEndpoint.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlEndpoint.java 2008-01-14 11:54:55 UTC (rev 370) @@ -0,0 +1,503 @@ +/** + * Copyright (C) 2007, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.kb.sparql; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; + +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.OntologyFormat; +import org.dllearner.core.OntologyFormatUnsupportedException; +import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.ConfigEntry; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.config.IntegerConfigOption; +import org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.core.config.StringSetConfigOption; +import org.dllearner.core.config.StringTupleListConfigOption; +import org.dllearner.core.dl.KB; +import org.dllearner.parser.KBParser; +import org.dllearner.reasoning.DIGConverter; +import org.dllearner.reasoning.JenaOWLDIGConverter; +import org.dllearner.utilities.StringTuple; + +/** + * Represents a SPARQL Endpoint. 
+ * + * @author Jens Lehmann + * @author Sebastian Knappe + * @author Sebastian Hellmann + */ +public class SparqlEndpoint extends KnowledgeSource { + + // ConfigOptions + private URL url; + String host; + private Set<String> instances=new HashSet<String>();; + private URL dumpFile; + private int recursionDepth = 1; + private int predefinedFilter = 0; + private int predefinedEndpoint = 0; + private Set<String> predList=new HashSet<String>(); + private Set<String> objList=new HashSet<String>(); + // private Set<String> classList; + private String format = "N-TRIPLES"; + private boolean dumpToFile = true; + private boolean useLits = false; + private boolean getAllSuperClasses = true; + private boolean closeAfterRecursion = true; + private int breakSuperClassRetrievalAfter = 200; + + private boolean learnDomain = false; + private boolean learnRange = false; + private int numberOfInstancesUsedForRoleLearning=40; + private String role=""; + private String blankNodeIdentifier = "bnode"; + + LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); + + /** + * Holds the results of the calculateSubjects method + */ + private String[] subjects; + + /** + * Holds the results of the calculateTriples method + */ + private String[] triples; + + /** + * Holds the results of the calculateConceptSubjects method + */ + private String[] conceptSubjects; + + /** + * if a method is running this becomes true + */ + private boolean subjectThreadRunning = false; + + private boolean triplesThreadRunning = false; + + private boolean conceptThreadRunning = false; + + /** + * the Thread that is running a method + */ + private Thread subjectThread; + + private Thread triplesThread; + + private Thread conceptThread; + + // received ontology as array, used if format=Array(an element of the + // array consists of the subject, predicate 
and object separated by '<' + private String[] ontArray; + + // received ontology as KB, the internal format + private KB kb; + + public static String getName() { + return "SPARQL Endpoint Restructured"; + } + + /** + * sets the ConfigOptions for this KnowledgeSource + * + * @return + */ + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); + options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); + options.add(new StringSetConfigOption("instances", + "relevant instances e.g. positive and negative examples in a learning problem")); + options.add(new IntegerConfigOption("recursionDepth", + "recursion depth of KB fragment selection", 2)); + options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); + options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); + + options.add(new StringSetConfigOption("predList", "list of all ignored roles")); + options.add(new StringSetConfigOption("objList", "list of all ignored objects")); + options.add(new StringSetConfigOption("classList", "list of all ignored classes")); + options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); + options.add(new BooleanConfigOption("dumpToFile", + "Specifies whether the extracted ontology is written to a file or not.", true)); + options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); + options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); + + options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); + options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); + options.add(new StringConfigOption("role", "role to learn Domain/Range from")); + 
options.add(new StringConfigOption("blankNodeIdentifier", + "used to identify blanknodes in Tripels")); + + options.add(new StringTupleListConfigOption("example", "example")); + options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); + options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); + options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); + options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); + options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); + + + + return options; + } + + /* + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + @SuppressWarnings( { "unchecked" }) + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + String option = entry.getOptionName(); + if (option.equals("url")) { + String s = (String) entry.getValue(); + try { + url = new URL(s); + } catch (MalformedURLException e) { + throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), + "malformed URL " + s); + } + } else if (option.equals("host")) { + host = (String) entry.getValue(); + } else if (option.equals("instances")) { + instances = (Set<String>) entry.getValue(); + } else if (option.equals("recursionDepth")) { + recursionDepth = (Integer) entry.getValue(); + } else if (option.equals("predList")) { + predList = (Set<String>) entry.getValue(); + } else if (option.equals("objList")) { + objList = (Set<String>) entry.getValue(); + //} else if (option.equals("classList")) { + // classList = (Set<String>) entry.getValue(); + } else if (option.equals("predefinedEndpoint")) { + predefinedEndpoint = (Integer) entry.getValue(); + } else if (option.equals("predefinedFilter")) { + predefinedFilter = (Integer) entry.getValue(); + } else if 
(option.equals("format")) { + format = (String) entry.getValue(); + } else if (option.equals("dumpToFile")) { + dumpToFile = (Boolean) entry.getValue(); + } else if (option.equals("useLits")) { + useLits = (Boolean) entry.getValue(); + } else if (option.equals("getAllSuperClasses")) { + getAllSuperClasses = (Boolean) entry.getValue(); + } else if (option.equals("learnDomain")) { + learnDomain = (Boolean) entry.getValue(); + }else if (option.equals("learnRange")) { + learnRange = (Boolean) entry.getValue(); + } else if (option.equals("role")) { + role = (String) entry.getValue(); + } else if (option.equals("blankNodeIdentifier")) { + blankNodeIdentifier = (String) entry.getValue(); + } else if (option.equals("example")) { + //System.out.println(entry.getValue()); + }else if (option.equals("replacePredicate")) { + replacePredicate = (LinkedList)entry.getValue(); + }else if (option.equals("replaceObject")) { + replaceObject = (LinkedList)entry.getValue(); + }else if (option.equals("breakSuperClassRetrievalAfter")) { + breakSuperClassRetrievalAfter = (Integer) entry.getValue(); + }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { + numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); + }else if (option.equals("closeAfterRecursion")) { + closeAfterRecursion = (Boolean) entry.getValue(); + } + + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() { + System.out.println("SparqlModul: Collecting Ontology"); + // SparqlOntologyCollector oc= + // new SparqlOntologyCollector(Datastructures.setToArray(instances), + // numberOfRecursions, filterMode, + // Datastructures.setToArray(predList),Datastructures.setToArray( + // objList),Datastructures.setToArray(classList),format,url,useLits); + Manager m = new Manager(); + SpecificSparqlEndpoint sse = null; + SparqlQueryType sqt = null; + // get Options for Manipulator + Manipulator man = new 
Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); + HashMap<String, String> parameters = new HashMap<String, String>(); + parameters.put("default-graph-uri", "http://dbpedia.org"); + parameters.put("format", "application/sparql-results.xml"); + + // get Options for endpoints + if (predefinedEndpoint >= 1) { + sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); + } else { + sse = new SpecificSparqlEndpoint(url, host, parameters); + } + + // get Options for Filters + + if (predefinedFilter >= 1) { + sqt = PredefinedFilter.getFilter(predefinedFilter); + + } else { + sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); + + } + // give everything to the manager + m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); + try { + String ont = ""; + //System.out.println(learnDomain); + // used to learn a domain of a role + if (learnDomain || learnRange) { + Set<String> pos=new HashSet<String>(); + Set<String> neg=new HashSet<String>(); + if(learnDomain){ + pos = m.getDomainInstancesForRole(role); + neg = m.getRangeInstancesForRole(role); + }else if(learnRange){ + neg = m.getDomainInstancesForRole(role); + pos = m.getRangeInstancesForRole(role); + } + //choose 30 + + + Set<String> tmp=new HashSet<String>(); + for(String one:pos){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + pos=tmp; + System.out.println("Instances used: "+pos.size()); + + tmp=new HashSet<String>(); + for(String one:neg){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + neg=tmp; + + instances=new HashSet<String>(); + instances.addAll(pos); + + instances.addAll(neg); + + for(String one:pos){ + System.out.println("+\""+one+"\""); + } + for(String one:neg){ + System.out.println("-\""+one+"\""); + } + + /*Random r= new Random(); + + + Object[] arr=instances.toArray(); + while(instances.size()>=30){ + + }*/ + // add the role to the filter(a 
solution is always EXISTS + // role.TOP) + m.addPredicateFilter(role); + //System.out.println(instances); + // THIS is a workaround + + } + // the actual extraction is started here + ont = m.extract(instances); + System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); + System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); + + System.out.println("Finished collecting Fragment"); + + if (dumpToFile) { + String filename = System.currentTimeMillis() + ".nt"; + String basedir = "cache" + File.separator; + try { + if (!new File(basedir).exists()) + new File(basedir).mkdir(); + + FileWriter fw = new FileWriter(new File(basedir + filename), true); + fw.write(ont); + fw.flush(); + fw.close(); + + dumpFile = (new File(basedir + filename)).toURI().toURL(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (format.equals("KB")) { + try { + //kb = KBParser.parseKBFile(new StringReader(ont)); + kb=KBParser.parseKBFile(dumpFile); + } catch (Exception e) { + e.printStackTrace(); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + System.out.println("SparqlModul: ****Finished"); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#toDIG() + */ + @Override + public String toDIG(URI kbURI) { + if (format.equals("N-TRIPLES")) + return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); + else + return DIGConverter.getDIGString(kb, kbURI).toString(); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#export(java.io.File, + * org.dllearner.core.OntologyFormat) + */ + @Override + public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { + // currently no export functions implemented, so we just throw an + // exception + throw new OntologyFormatUnsupportedException("export", format); + } + + public URL getURL() { + return url; + } + + public 
String[] getOntArray() { + return ontArray; + } + + public void calculateSubjects(String label, int limit) { + System.out.println("SparqlModul: Collecting Subjects"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + subjects = oc.getSubjectsFromLabel(label, limit); + } catch (IOException e) { + subjects = new String[1]; + subjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public void calculateTriples(String subject) { + System.out.println("SparqlModul: Collecting Triples"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + triples = oc.collectTriples(subject); + } catch (IOException e) { + triples = new String[1]; + triples[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public void calculateConceptSubjects(String concept) { + System.out.println("SparqlModul: Collecting Subjects"); + SparqlOntologyCollector oc = new SparqlOntologyCollector(url); + try { + conceptSubjects = oc.getSubjectsFromConcept(concept); + } catch (IOException e) { + conceptSubjects = new String[1]; + conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public boolean subjectThreadIsRunning() { + return subjectThreadRunning; + } + + public void setSubjectThreadRunning(boolean bool) { + subjectThreadRunning = bool; + } + + public boolean triplesThreadIsRunning() { + return triplesThreadRunning; + } + + public void setTriplesThreadRunning(boolean bool) { + triplesThreadRunning = bool; + } + + public boolean conceptThreadIsRunning() { + return conceptThreadRunning; + } + + public void setConceptThreadRunning(boolean bool) { + conceptThreadRunning = bool; + } + + public String[] getSubjects() { + return subjects; + } + + public Thread getSubjectThread() { + return subjectThread; + } + + public void setSubjectThread(Thread 
subjectThread) { + this.subjectThread = subjectThread; + } + + public Thread getTriplesThread() { + return triplesThread; + } + + public void setTriplesThread(Thread triplesThread) { + this.triplesThread = triplesThread; + } + + public Thread getConceptThread() { + return conceptThread; + } + + public void setConceptThread(Thread conceptThread) { + this.conceptThread = conceptThread; + } + + public String[] getTriples() { + return triples; + } + + public String[] getConceptSubjects() { + return conceptSubjects; + } +} Added: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-01-14 11:54:55 UTC (rev 370) @@ -0,0 +1,89 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql; + +import com.hp.hpl.jena.query.Query; +import com.hp.hpl.jena.query.QueryExecution; +import com.hp.hpl.jena.query.QueryExecutionFactory; +import com.hp.hpl.jena.query.QueryFactory; +import com.hp.hpl.jena.query.ResultSet; +import com.hp.hpl.jena.query.ResultSetFormatter; + +/** + * Represents a SPARQL query. 
It includes support for stopping the SPARQL + * query (which may be necessary if a timeout is reached). + * + * TODO: It is probably good to change all SPARQL query calls to use only + * this class. + * + * TODO: Could we use Jena as a solid foundation here? (com.hp.jena.query) + * + * @author Jens Lehmann + * + */ +public class SparqlQuery { + + private boolean isRunning = false; + + public SparqlQuery(SpecificSparqlEndpoint endpoint, String query) { + + } + + public void send() { + isRunning = true; + + // ... send query + // ... check periodically whether isRunning is still true, if not + // abort the query + } + + public void stop() { + isRunning = false; + } + + public boolean isRunning() { + return isRunning; + } + + // this is a working Jena script + // TODO: query runtime seems to be much too high (compared to running it in http://dbpedia.org/sparql) + // verify whether our SPARQL query implementation is faster and why; + // TODO: check whether Jena works with the other endpoints in PredefinedEndpoint; if not + // check whether it can be configured to run with these + public static void main(String[] args) { + + String queryString = "PREFIX dbpedia2: <http://dbpedia.org/property/> " + + "PREFIX skos: <http://www.w3.org/2004/02/skos/core#>" + + "SELECT ?episode ?chalkboard_gag WHERE { ?episode skos:subject" + + " <http://dbpedia.org/resource/Category:The_Simpsons_episodes%2C_season_12>." 
+ + " ?episode dbpedia2:blackboard ?chalkboard_gag }"; + + System.out.println(queryString); + // create a query and parse it into Jena + Query query = QueryFactory.create(queryString); + query.validate(); + // Jena access to DBpedia SPARQL endpoint + QueryExecution queryExecution = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query); + System.out.println("query SPARQL server"); + ResultSet rs = queryExecution.execSelect(); + ResultSetFormatter.out(System.out, rs, query) ; + } + +} Modified: trunk/src/dl-learner/org/dllearner/server/ClientState.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/ClientState.java 2008-01-14 08:56:17 UTC (rev 369) +++ trunk/src/dl-learner/org/dllearner/server/ClientState.java 2008-01-14 11:54:55 UTC (rev 370) @@ -33,7 +33,7 @@ import org.dllearner.core.ReasonerComponent; import org.dllearner.core.ReasoningService; import org.dllearner.kb.OWLFile; -import org.dllearner.kb.SparqlEndpoint; +import org.dllearner.kb.sparql.SparqlEndpoint; /** * Stores the state of a DL-Learner client session. Modified: trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java 2008-01-14 08:56:17 UTC (rev 369) +++ trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java 2008-01-14 11:54:55 UTC (rev 370) @@ -46,7 +46,7 @@ import org.dllearner.core.dl.Concept; import org.dllearner.core.dl.Individual; import org.dllearner.kb.OWLFile; -import org.dllearner.kb.SparqlEndpoint; +import org.dllearner.kb.sparql.SparqlEndpoint; import org.dllearner.learningproblems.PosNegDefinitionLP; import org.dllearner.learningproblems.PosNegInclusionLP; import org.dllearner.learningproblems.PosOnlyDefinitionLP; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-01-18 13:11:00
|
Revision: 392 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=392&view=rev Author: jenslehmann Date: 2008-01-18 05:10:58 -0800 (Fri, 18 Jan 2008) Log Message: ----------- - corrected Sparql component entry in components.ini (such that it can be used in DL-Learner) - implemented interruptable Jena SPARQL query Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/cli/Start.java trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java trunk/src/dl-learner/org/dllearner/server/ClientState.java trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/kb/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SpecificSparqlEndpoint.java Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/lib/components.ini 2008-01-18 13:10:58 UTC (rev 392) @@ -3,7 +3,7 @@ # knowledge sources org.dllearner.kb.OWLFile 
org.dllearner.kb.KBFile -org.dllearner.kb.sparql.SparqlEndpoint +org.dllearner.kb.sparql.SparqlKnowledgeSource # reasoners org.dllearner.reasoning.OWLAPIReasoner org.dllearner.reasoning.DIGReasoner Modified: trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-18 13:10:58 UTC (rev 392) @@ -60,7 +60,7 @@ import org.dllearner.core.dl.Individual; import org.dllearner.kb.KBFile; import org.dllearner.kb.OWLFile; -import org.dllearner.kb.SparqlKnowledgeSource; +import org.dllearner.kb.sparql.SparqlKnowledgeSource; import org.dllearner.learningproblems.PosNegDefinitionLP; import org.dllearner.learningproblems.PosNegInclusionLP; import org.dllearner.learningproblems.PosOnlyDefinitionLP; Deleted: trunk/src/dl-learner/org/dllearner/kb/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/SparqlKnowledgeSource.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/SparqlKnowledgeSource.java 2008-01-18 13:10:58 UTC (rev 392) @@ -1,524 +0,0 @@ -/** - * Copyright (C) 2007, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. 
If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.net.MalformedURLException; -import java.net.URI; -import java.net.URL; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.Set; - -import org.dllearner.core.KnowledgeSource; -import org.dllearner.core.OntologyFormat; -import org.dllearner.core.OntologyFormatUnsupportedException; -import org.dllearner.core.config.BooleanConfigOption; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.IntegerConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.config.StringConfigOption; -import org.dllearner.core.config.StringSetConfigOption; -import org.dllearner.core.config.StringTupleListConfigOption; -import org.dllearner.core.dl.KB; -import org.dllearner.kb.sparql.Manager; -import org.dllearner.kb.sparql.Manipulator; -import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; -import org.dllearner.kb.sparql.configuration.PredefinedFilter; -import org.dllearner.kb.sparql.configuration.SparqlQueryType; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; -import org.dllearner.kb.sparql.old.*; -import org.dllearner.parser.KBParser; -import org.dllearner.reasoning.DIGConverter; -import org.dllearner.reasoning.JenaOWLDIGConverter; -import org.dllearner.utilities.StringTuple; - -/** - * Represents a SPARQL Endpoint. 
- * - * @author Jens Lehmann - * @author Sebastian Knappe - * @author Sebastian Hellmann - */ -public class SparqlKnowledgeSource extends KnowledgeSource { - - // ConfigOptions - private URL url; - String host; - private Set<String> instances=new HashSet<String>();; - private URL dumpFile; - private int recursionDepth = 1; - private int predefinedFilter = 0; - private int predefinedEndpoint = 0; - private Set<String> predList=new HashSet<String>(); - private Set<String> objList=new HashSet<String>(); - // private Set<String> classList; - private String format = "N-TRIPLES"; - private boolean dumpToFile = true; - private boolean useLits = false; - private boolean getAllSuperClasses = true; - private boolean closeAfterRecursion = true; - private int breakSuperClassRetrievalAfter = 200; - - private boolean learnDomain = false; - private boolean learnRange = false; - private int numberOfInstancesUsedForRoleLearning=40; - private String role=""; - private String blankNodeIdentifier = "bnode"; - - LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); - LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); - - /** - * Holds the results of the calculateSubjects method - */ - private String[] subjects; - - /** - * Holds the results of the calculateTriples method - */ - private String[] triples; - - /** - * Holds the results of the calculateConceptSubjects method - */ - private String[] conceptSubjects; - - /** - * if a method is running this becomes true - */ - private boolean subjectThreadRunning = false; - - private boolean triplesThreadRunning = false; - - private boolean conceptThreadRunning = false; - - /** - * the Thread that is running a method - */ - private Thread subjectThread; - - private Thread triplesThread; - - private Thread conceptThread; - - // received ontology as array, used if format=Array(an element of the - // array consists of the subject, 
predicate and object separated by '<' - private String[] ontArray; - - // received ontology as KB, the internal format - private KB kb; - - public static String getName() { - return "SPARQL Endpoint Restructured"; - } - - /** - * sets the ConfigOptions for this KnowledgeSource - * - * @return - */ - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); - options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); - options.add(new StringSetConfigOption("instances", - "relevant instances e.g. positive and negative examples in a learning problem")); - options.add(new IntegerConfigOption("recursionDepth", - "recursion depth of KB fragment selection", 2)); - options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); - options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); - - options.add(new StringSetConfigOption("predList", "list of all ignored roles")); - options.add(new StringSetConfigOption("objList", "list of all ignored objects")); - options.add(new StringSetConfigOption("classList", "list of all ignored classes")); - options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); - options.add(new BooleanConfigOption("dumpToFile", - "Specifies whether the extracted ontology is written to a file or not.", true)); - options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); - options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); - - options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); - options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); - options.add(new StringConfigOption("role", "role to learn Domain/Range 
from")); - options.add(new StringConfigOption("blankNodeIdentifier", - "used to identify blanknodes in Tripels")); - - options.add(new StringTupleListConfigOption("example", "example")); - options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); - options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); - options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); - options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); - options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); - - - - return options; - } - - /* - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings( { "unchecked" }) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String option = entry.getOptionName(); - if (option.equals("url")) { - String s = (String) entry.getValue(); - try { - url = new URL(s); - } catch (MalformedURLException e) { - throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), - "malformed URL " + s); - } - } else if (option.equals("host")) { - host = (String) entry.getValue(); - } else if (option.equals("instances")) { - instances = (Set<String>) entry.getValue(); - } else if (option.equals("recursionDepth")) { - recursionDepth = (Integer) entry.getValue(); - } else if (option.equals("predList")) { - predList = (Set<String>) entry.getValue(); - } else if (option.equals("objList")) { - objList = (Set<String>) entry.getValue(); - //} else if (option.equals("classList")) { - // classList = (Set<String>) entry.getValue(); - } else if (option.equals("predefinedEndpoint")) { - predefinedEndpoint = (Integer) entry.getValue(); - } else if (option.equals("predefinedFilter")) { - predefinedFilter = (Integer) entry.getValue(); - 
} else if (option.equals("format")) { - format = (String) entry.getValue(); - } else if (option.equals("dumpToFile")) { - dumpToFile = (Boolean) entry.getValue(); - } else if (option.equals("useLits")) { - useLits = (Boolean) entry.getValue(); - } else if (option.equals("getAllSuperClasses")) { - getAllSuperClasses = (Boolean) entry.getValue(); - } else if (option.equals("learnDomain")) { - learnDomain = (Boolean) entry.getValue(); - }else if (option.equals("learnRange")) { - learnRange = (Boolean) entry.getValue(); - } else if (option.equals("role")) { - role = (String) entry.getValue(); - } else if (option.equals("blankNodeIdentifier")) { - blankNodeIdentifier = (String) entry.getValue(); - } else if (option.equals("example")) { - //System.out.println(entry.getValue()); - }else if (option.equals("replacePredicate")) { - replacePredicate = (LinkedList)entry.getValue(); - }else if (option.equals("replaceObject")) { - replaceObject = (LinkedList)entry.getValue(); - }else if (option.equals("breakSuperClassRetrievalAfter")) { - breakSuperClassRetrievalAfter = (Integer) entry.getValue(); - }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { - numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); - }else if (option.equals("closeAfterRecursion")) { - closeAfterRecursion = (Boolean) entry.getValue(); - } - - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - System.out.println("SparqlModul: Collecting Ontology"); - // SparqlOntologyCollector oc= - // new SparqlOntologyCollector(Datastructures.setToArray(instances), - // numberOfRecursions, filterMode, - // Datastructures.setToArray(predList),Datastructures.setToArray( - // objList),Datastructures.setToArray(classList),format,url,useLits); - Manager m = new Manager(); - SpecificSparqlEndpoint sse = null; - SparqlQueryType sqt = null; - // get Options for Manipulator - Manipulator man = new 
Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); - HashMap<String, String> parameters = new HashMap<String, String>(); - parameters.put("default-graph-uri", "http://dbpedia.org"); - parameters.put("format", "application/sparql-results.xml"); - - // get Options for endpoints - if (predefinedEndpoint >= 1) { - sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); - } else { - sse = new SpecificSparqlEndpoint(url, host, parameters); - } - - // get Options for Filters - - if (predefinedFilter >= 1) { - sqt = PredefinedFilter.getFilter(predefinedFilter); - - } else { - sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); - - } - // give everything to the manager - m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); - try { - String ont = ""; - //System.out.println(learnDomain); - // used to learn a domain of a role - if (learnDomain || learnRange) { - Set<String> pos=new HashSet<String>(); - Set<String> neg=new HashSet<String>(); - if(learnDomain){ - pos = m.getDomainInstancesForRole(role); - neg = m.getRangeInstancesForRole(role); - }else if(learnRange){ - neg = m.getDomainInstancesForRole(role); - pos = m.getRangeInstancesForRole(role); - } - //choose 30 - - - Set<String> tmp=new HashSet<String>(); - for(String one:pos){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - pos=tmp; - System.out.println("Instances used: "+pos.size()); - - tmp=new HashSet<String>(); - for(String one:neg){ - tmp.add(one); - if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; - } - neg=tmp; - - instances=new HashSet<String>(); - instances.addAll(pos); - - instances.addAll(neg); - - for(String one:pos){ - System.out.println("+\""+one+"\""); - } - for(String one:neg){ - System.out.println("-\""+one+"\""); - } - - /*Random r= new Random(); - - - Object[] arr=instances.toArray(); - while(instances.size()>=30){ - - }*/ - // add the role to the filter(a 
solution is always EXISTS - // role.TOP) - m.addPredicateFilter(role); - //System.out.println(instances); - // THIS is a workaround - - } - // the actual extraction is started here - ont = m.extract(instances); - System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); - System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); - - System.out.println("Finished collecting Fragment"); - - if (dumpToFile) { - String filename = System.currentTimeMillis() + ".nt"; - String basedir = "cache" + File.separator; - try { - if (!new File(basedir).exists()) - new File(basedir).mkdir(); - - FileWriter fw = new FileWriter(new File(basedir + filename), true); - fw.write(ont); - fw.flush(); - fw.close(); - - dumpFile = (new File(basedir + filename)).toURI().toURL(); - } catch (Exception e) { - e.printStackTrace(); - } - } - if (format.equals("KB")) { - try { - //kb = KBParser.parseKBFile(new StringReader(ont)); - kb=KBParser.parseKBFile(dumpFile); - } catch (Exception e) { - e.printStackTrace(); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - System.out.println("SparqlModul: ****Finished"); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#toDIG() - */ - @Override - public String toDIG(URI kbURI) { - if (format.equals("N-TRIPLES")) - return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); - else - return DIGConverter.getDIGString(kb, kbURI).toString(); - } - - /* - * (non-Javadoc) - * - * @see org.dllearner.core.KnowledgeSource#export(java.io.File, - * org.dllearner.core.OntologyFormat) - */ - @Override - public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { - // currently no export functions implemented, so we just throw an - // exception - throw new OntologyFormatUnsupportedException("export", format); - } - - public URL getURL() { - return url; - } - - public 
String[] getOntArray() { - return ontArray; - } - - - /** - * TODO SparqlOntologyCollector needs to be removed - * @param label - * @param limit - */ - public void calculateSubjects(String label, int limit) { - System.out.println("SparqlModul: Collecting Subjects"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - subjects = oc.getSubjectsFromLabel(label, limit); - } catch (IOException e) { - subjects = new String[1]; - subjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - /** - * TODO SparqlOntologyCollector needs to be removed - * @param subject - */ - public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - triples = oc.collectTriples(subject); - } catch (IOException e) { - triples = new String[1]; - triples[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - /** - * TODO SparqlOntologyCollector needs to be removed - * @param concept - */ - public void calculateConceptSubjects(String concept) { - System.out.println("SparqlModul: Collecting Subjects"); - oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); - try { - conceptSubjects = oc.getSubjectsFromConcept(concept); - } catch (IOException e) { - conceptSubjects = new String[1]; - conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; - } - System.out.println("SparqlModul: ****Finished"); - } - - public boolean subjectThreadIsRunning() { - return subjectThreadRunning; - } - - public void setSubjectThreadRunning(boolean bool) { - subjectThreadRunning = bool; - } - - public boolean triplesThreadIsRunning() { - return triplesThreadRunning; - } - - public void setTriplesThreadRunning(boolean bool) { - triplesThreadRunning = bool; - } - - public boolean conceptThreadIsRunning() { - return 
conceptThreadRunning; - } - - public void setConceptThreadRunning(boolean bool) { - conceptThreadRunning = bool; - } - - public String[] getSubjects() { - return subjects; - } - - public Thread getSubjectThread() { - return subjectThread; - } - - public void setSubjectThread(Thread subjectThread) { - this.subjectThread = subjectThread; - } - - public Thread getTriplesThread() { - return triplesThread; - } - - public void setTriplesThread(Thread triplesThread) { - this.triplesThread = triplesThread; - } - - public Thread getConceptThread() { - return conceptThread; - } - - public void setConceptThread(Thread conceptThread) { - this.conceptThread = conceptThread; - } - - public String[] getTriples() { - return triples; - } - - public String[] getConceptSubjects() { - return conceptSubjects; - } -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/Manager.java 2008-01-18 13:10:58 UTC (rev 392) @@ -28,7 +28,7 @@ import org.dllearner.kb.sparql.configuration.Configuration; import org.dllearner.kb.sparql.configuration.SparqlQueryType; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.datastructure.Node; import org.dllearner.kb.sparql.old.oldSparqlOntologyCollector; import org.dllearner.utilities.StringTuple; @@ -46,7 +46,7 @@ private ExtractionAlgorithm extractionAlgorithm; public void useConfiguration(SparqlQueryType SparqlQueryType, - SpecificSparqlEndpoint SparqlEndpoint, Manipulator manipulator, int recursiondepth, + SparqlEndpoint SparqlEndpoint, Manipulator manipulator, int recursiondepth, boolean getAllSuperClasses,boolean closeAfterRecursion) { this.configuration = new Configuration(SparqlEndpoint, SparqlQueryType, 
manipulator, Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java (from rev 391, trunk/src/dl-learner/org/dllearner/kb/SparqlKnowledgeSource.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-18 13:10:58 UTC (rev 392) @@ -0,0 +1,528 @@ +/** + * Copyright (C) 2007, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.kb.sparql; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.Set; + +import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.OntologyFormat; +import org.dllearner.core.OntologyFormatUnsupportedException; +import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.ConfigEntry; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.config.IntegerConfigOption; +import org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.core.config.StringSetConfigOption; +import org.dllearner.core.config.StringTupleListConfigOption; +import org.dllearner.core.dl.KB; +import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; +import org.dllearner.kb.sparql.configuration.PredefinedFilter; +import org.dllearner.kb.sparql.configuration.SparqlQueryType; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; +import org.dllearner.kb.sparql.old.*; +import org.dllearner.kb.sparql.query.SparqlQuery; +import org.dllearner.parser.KBParser; +import org.dllearner.reasoning.DIGConverter; +import org.dllearner.reasoning.JenaOWLDIGConverter; +import org.dllearner.utilities.StringTuple; + +/** + * Represents a SPARQL Endpoint. 
+ * + * @author Jens Lehmann + * @author Sebastian Knappe + * @author Sebastian Hellmann + */ +public class SparqlKnowledgeSource extends KnowledgeSource { + + // ConfigOptions + private URL url; + String host; + private Set<String> instances=new HashSet<String>();; + private URL dumpFile; + private int recursionDepth = 1; + private int predefinedFilter = 0; + private int predefinedEndpoint = 0; + private Set<String> predList=new HashSet<String>(); + private Set<String> objList=new HashSet<String>(); + // private Set<String> classList; + private String format = "N-TRIPLES"; + private boolean dumpToFile = true; + private boolean useLits = false; + private boolean getAllSuperClasses = true; + private boolean closeAfterRecursion = true; + private int breakSuperClassRetrievalAfter = 200; + + private boolean learnDomain = false; + private boolean learnRange = false; + private int numberOfInstancesUsedForRoleLearning=40; + private String role=""; + private String blankNodeIdentifier = "bnode"; + + LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); + LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); + + SparqlEndpoint sse = null; + + /** + * Holds the results of the calculateSubjects method + */ + private String[] subjects; + + /** + * Holds the results of the calculateTriples method + */ + private String[] triples; + + /** + * Holds the results of the calculateConceptSubjects method + */ + private String[] conceptSubjects; + + /** + * if a method is running this becomes true + */ + private boolean subjectThreadRunning = false; + + private boolean triplesThreadRunning = false; + + private boolean conceptThreadRunning = false; + + /** + * the Thread that is running a method + */ + private Thread subjectThread; + + private Thread triplesThread; + + private Thread conceptThread; + + // received ontology as array, used if format=Array(an element of the + // 
array consists of the subject, predicate and object separated by '<' + private String[] ontArray; + + // received ontology as KB, the internal format + private KB kb; + + public static String getName() { + return "SPARQL Endpoint Restructured"; + } + + /** + * sets the ConfigOptions for this KnowledgeSource + * + * @return + */ + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); + options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); + options.add(new StringSetConfigOption("instances", + "relevant instances e.g. positive and negative examples in a learning problem")); + options.add(new IntegerConfigOption("recursionDepth", + "recursion depth of KB fragment selection", 2)); + options.add(new IntegerConfigOption("predefinedFilter", "the mode of the SPARQL Filter")); + options.add(new IntegerConfigOption("predefinedEndpoint", "the mode of the SPARQL Filter")); + + options.add(new StringSetConfigOption("predList", "list of all ignored roles")); + options.add(new StringSetConfigOption("objList", "list of all ignored objects")); + options.add(new StringSetConfigOption("classList", "list of all ignored classes")); + options.add(new StringConfigOption("format", "N-TRIPLES or KB format", "N-TRIPLES")); + options.add(new BooleanConfigOption("dumpToFile", + "Specifies whether the extracted ontology is written to a file or not.", true)); + options.add(new BooleanConfigOption("useLits", "use Literals in SPARQL query")); + options.add(new BooleanConfigOption("getAllSuperClasses", "If true then all superclasses are retrieved until the most general class (owl:Thing) is reached.", true)); + + options.add(new BooleanConfigOption("learnDomain", "learns the Domain for a Role")); + options.add(new BooleanConfigOption("learnRange", "learns the Range for a Role")); + options.add(new StringConfigOption("role", 
"role to learn Domain/Range from")); + options.add(new StringConfigOption("blankNodeIdentifier", + "used to identify blanknodes in Tripels")); + + options.add(new StringTupleListConfigOption("example", "example")); + options.add(new StringTupleListConfigOption("replacePredicate", "rule for replacing predicates")); + options.add(new StringTupleListConfigOption("replaceObject", "rule for replacing predicates")); + options.add(new IntegerConfigOption("breakSuperClassRetrievalAfter", "stops a cyclic hierarchy after specified number of classes")); + options.add(new IntegerConfigOption("numberOfInstancesUsedForRoleLearning", "")); + options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); + + + + return options; + } + + /* + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + @SuppressWarnings( { "unchecked" }) + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + String option = entry.getOptionName(); + if (option.equals("url")) { + String s = (String) entry.getValue(); + try { + url = new URL(s); + } catch (MalformedURLException e) { + throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), + "malformed URL " + s); + } + } else if (option.equals("host")) { + host = (String) entry.getValue(); + } else if (option.equals("instances")) { + instances = (Set<String>) entry.getValue(); + } else if (option.equals("recursionDepth")) { + recursionDepth = (Integer) entry.getValue(); + } else if (option.equals("predList")) { + predList = (Set<String>) entry.getValue(); + } else if (option.equals("objList")) { + objList = (Set<String>) entry.getValue(); + //} else if (option.equals("classList")) { + // classList = (Set<String>) entry.getValue(); + } else if (option.equals("predefinedEndpoint")) { + predefinedEndpoint = (Integer) entry.getValue(); + } else if (option.equals("predefinedFilter")) { + predefinedFilter = 
(Integer) entry.getValue(); + } else if (option.equals("format")) { + format = (String) entry.getValue(); + } else if (option.equals("dumpToFile")) { + dumpToFile = (Boolean) entry.getValue(); + } else if (option.equals("useLits")) { + useLits = (Boolean) entry.getValue(); + } else if (option.equals("getAllSuperClasses")) { + getAllSuperClasses = (Boolean) entry.getValue(); + } else if (option.equals("learnDomain")) { + learnDomain = (Boolean) entry.getValue(); + }else if (option.equals("learnRange")) { + learnRange = (Boolean) entry.getValue(); + } else if (option.equals("role")) { + role = (String) entry.getValue(); + } else if (option.equals("blankNodeIdentifier")) { + blankNodeIdentifier = (String) entry.getValue(); + } else if (option.equals("example")) { + //System.out.println(entry.getValue()); + }else if (option.equals("replacePredicate")) { + replacePredicate = (LinkedList)entry.getValue(); + }else if (option.equals("replaceObject")) { + replaceObject = (LinkedList)entry.getValue(); + }else if (option.equals("breakSuperClassRetrievalAfter")) { + breakSuperClassRetrievalAfter = (Integer) entry.getValue(); + }else if (option.equals("numberOfInstancesUsedForRoleLearning")) { + numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); + }else if (option.equals("closeAfterRecursion")) { + closeAfterRecursion = (Boolean) entry.getValue(); + } + + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() { + System.out.println("SparqlModul: Collecting Ontology"); + // SparqlOntologyCollector oc= + // new SparqlOntologyCollector(Datastructures.setToArray(instances), + // numberOfRecursions, filterMode, + // Datastructures.setToArray(predList),Datastructures.setToArray( + // objList),Datastructures.setToArray(classList),format,url,useLits); + Manager m = new Manager(); + SparqlQueryType sqt = null; + // get Options for Manipulator + Manipulator man = new 
Manipulator(blankNodeIdentifier,breakSuperClassRetrievalAfter,replacePredicate,replaceObject); + HashMap<String, String> parameters = new HashMap<String, String>(); + parameters.put("default-graph-uri", "http://dbpedia.org"); + parameters.put("format", "application/sparql-results.xml"); + + // get Options for endpoints + if (predefinedEndpoint >= 1) { + sse = PredefinedEndpoint.getEndpoint(predefinedEndpoint); + } else { + sse = new SparqlEndpoint(url, host, parameters); + } + + // get Options for Filters + + if (predefinedFilter >= 1) { + sqt = PredefinedFilter.getFilter(predefinedFilter); + + } else { + sqt = new SparqlQueryType("forbid", objList, predList, useLits + ""); + + } + // give everything to the manager + m.useConfiguration(sqt, sse, man, recursionDepth, getAllSuperClasses,closeAfterRecursion); + try { + String ont = ""; + //System.out.println(learnDomain); + // used to learn a domain of a role + if (learnDomain || learnRange) { + Set<String> pos=new HashSet<String>(); + Set<String> neg=new HashSet<String>(); + if(learnDomain){ + pos = m.getDomainInstancesForRole(role); + neg = m.getRangeInstancesForRole(role); + }else if(learnRange){ + neg = m.getDomainInstancesForRole(role); + pos = m.getRangeInstancesForRole(role); + } + //choose 30 + + + Set<String> tmp=new HashSet<String>(); + for(String one:pos){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + pos=tmp; + System.out.println("Instances used: "+pos.size()); + + tmp=new HashSet<String>(); + for(String one:neg){ + tmp.add(one); + if(tmp.size()>=numberOfInstancesUsedForRoleLearning)break; + } + neg=tmp; + + instances=new HashSet<String>(); + instances.addAll(pos); + + instances.addAll(neg); + + for(String one:pos){ + System.out.println("+\""+one+"\""); + } + for(String one:neg){ + System.out.println("-\""+one+"\""); + } + + /*Random r= new Random(); + + + Object[] arr=instances.toArray(); + while(instances.size()>=30){ + + }*/ + // add the role to the filter(a solution 
is always EXISTS + // role.TOP) + m.addPredicateFilter(role); + //System.out.println(instances); + // THIS is a workaround + + } + // the actual extraction is started here + ont = m.extract(instances); + System.out.println("Number of cached SPARQL queries: "+m.getConfiguration().numberOfCachedSparqlQueries); + System.out.println("Number of uncached SPARQL queries: "+m.getConfiguration().numberOfUncachedSparqlQueries); + + System.out.println("Finished collecting Fragment"); + + if (dumpToFile) { + String filename = System.currentTimeMillis() + ".nt"; + String basedir = "cache" + File.separator; + try { + if (!new File(basedir).exists()) + new File(basedir).mkdir(); + + FileWriter fw = new FileWriter(new File(basedir + filename), true); + fw.write(ont); + fw.flush(); + fw.close(); + + dumpFile = (new File(basedir + filename)).toURI().toURL(); + } catch (Exception e) { + e.printStackTrace(); + } + } + if (format.equals("KB")) { + try { + //kb = KBParser.parseKBFile(new StringReader(ont)); + kb=KBParser.parseKBFile(dumpFile); + } catch (Exception e) { + e.printStackTrace(); + } + } + } catch (Exception e) { + e.printStackTrace(); + } + System.out.println("SparqlModul: ****Finished"); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#toDIG() + */ + @Override + public String toDIG(URI kbURI) { + if (format.equals("N-TRIPLES")) + return JenaOWLDIGConverter.getTellsString(dumpFile, OntologyFormat.N_TRIPLES, kbURI); + else + return DIGConverter.getDIGString(kb, kbURI).toString(); + } + + /* + * (non-Javadoc) + * + * @see org.dllearner.core.KnowledgeSource#export(java.io.File, + * org.dllearner.core.OntologyFormat) + */ + @Override + public void export(File file, OntologyFormat format) throws OntologyFormatUnsupportedException { + // currently no export functions implemented, so we just throw an + // exception + throw new OntologyFormatUnsupportedException("export", format); + } + + public URL getURL() { + return url; + } + + public String[] 
getOntArray() { + return ontArray; + } + + + /** + * TODO SparqlOntologyCollector needs to be removed + * @param label + * @param limit + */ + public void calculateSubjects(String label, int limit) { + System.out.println("SparqlModul: Collecting Subjects"); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + try { + subjects = oc.getSubjectsFromLabel(label, limit); + } catch (IOException e) { + subjects = new String[1]; + subjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + /** + * TODO SparqlOntologyCollector needs to be removed + * @param subject + */ + public void calculateTriples(String subject) { + System.out.println("SparqlModul: Collecting Triples"); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + try { + triples = oc.collectTriples(subject); + } catch (IOException e) { + triples = new String[1]; + triples[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + /** + * TODO SparqlOntologyCollector needs to be removed + * @param concept + */ + public void calculateConceptSubjects(String concept) { + System.out.println("SparqlModul: Collecting Subjects"); + oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); + try { + conceptSubjects = oc.getSubjectsFromConcept(concept); + } catch (IOException e) { + conceptSubjects = new String[1]; + conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; + } + System.out.println("SparqlModul: ****Finished"); + } + + public boolean subjectThreadIsRunning() { + return subjectThreadRunning; + } + + public void setSubjectThreadRunning(boolean bool) { + subjectThreadRunning = bool; + } + + public boolean triplesThreadIsRunning() { + return triplesThreadRunning; + } + + public void setTriplesThreadRunning(boolean bool) { + triplesThreadRunning = bool; + } + + public boolean conceptThreadIsRunning() { + return 
conceptThreadRunning; + } + + public void setConceptThreadRunning(boolean bool) { + conceptThreadRunning = bool; + } + + public String[] getSubjects() { + return subjects; + } + + public Thread getSubjectThread() { + return subjectThread; + } + + public void setSubjectThread(Thread subjectThread) { + this.subjectThread = subjectThread; + } + + public Thread getTriplesThread() { + return triplesThread; + } + + public void setTriplesThread(Thread triplesThread) { + this.triplesThread = triplesThread; + } + + public Thread getConceptThread() { + return conceptThread; + } + + public void setConceptThread(Thread conceptThread) { + this.conceptThread = conceptThread; + } + + public String[] getTriples() { + return triples; + } + + public String[] getConceptSubjects() { + return conceptSubjects; + } + + public SparqlQuery sparqlQuery(String query) { + return new SparqlQuery(sse, query); + } +} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQuery.java 2008-01-18 13:10:58 UTC (rev 392) @@ -33,7 +33,7 @@ import java.util.Set; import org.dllearner.kb.sparql.configuration.Configuration; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.query.Cache; import org.dllearner.kb.sparql.query.CachedSparqlQuery; import org.dllearner.kb.sparql.query.SparqlQuery; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/TypedSparqlQueryClasses.java 2008-01-18 13:10:58 
UTC (rev 392) @@ -33,7 +33,7 @@ import java.util.Set; import org.dllearner.kb.sparql.configuration.Configuration; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import org.dllearner.kb.sparql.query.Cache; import org.dllearner.utilities.StringTuple; @@ -193,7 +193,7 @@ // String an Sparql-Endpoint schicken HttpURLConnection connection; - SpecificSparqlEndpoint se = configuration.getSparqlEndpoint(); + SparqlEndpoint se = configuration.getSparqlEndpoint(); p("URL: "+se.getURL()); p("Host: "+se.getHost()); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/Configuration.java 2008-01-18 13:10:58 UTC (rev 392) @@ -33,7 +33,7 @@ * see the other classes, which are used as attributes here * */ - private SpecificSparqlEndpoint specificSparqlEndpoint; + private SparqlEndpoint specificSparqlEndpoint; private SparqlQueryType sparqlQueryType; private Manipulator manipulator; // the following needs to be moved to @@ -44,7 +44,7 @@ public int numberOfUncachedSparqlQueries=0; public int numberOfCachedSparqlQueries=0; - public Configuration(SpecificSparqlEndpoint specificSparqlEndpoint, + public Configuration(SparqlEndpoint specificSparqlEndpoint, SparqlQueryType sparqlQueryType, Manipulator manipulator, int recursiondepth, boolean getAllSuperClasses, boolean closeAfterRecursion) { this.specificSparqlEndpoint = specificSparqlEndpoint; @@ -67,7 +67,7 @@ return this.manipulator; } - public SpecificSparqlEndpoint getSparqlEndpoint() { + public SparqlEndpoint getSparqlEndpoint() { return specificSparqlEndpoint; } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/PredefinedEndpoint.java 2008-01-18 13:10:58 UTC (rev 392) @@ -30,7 +30,7 @@ * */ public class PredefinedEndpoint { - public static SpecificSparqlEndpoint getEndpoint(int i) { + public static SparqlEndpoint getEndpoint(int i) { switch (i) { case 1: @@ -49,7 +49,7 @@ return null; } - public static SpecificSparqlEndpoint dbpediaEndpoint() { + public static SparqlEndpoint dbpediaEndpoint() { URL u = null; HashMap<String, String> m = new HashMap<String, String>(); m.put("default-graph-uri", "http://dbpedia.org"); @@ -59,10 +59,10 @@ } catch (Exception e) { e.printStackTrace(); } - return new SpecificSparqlEndpoint(u, "dbpedia.openlinksw.com", m); + return new SparqlEndpoint(u, "dbpedia.openlinksw.com", m); } - public static SpecificSparqlEndpoint localJoseki() { + public static SparqlEndpoint localJoseki() { URL u = null; HashMap<String, String> m = new HashMap<String, String>(); // m.put("default-graph-uri", "http://dbpedia.org"); @@ -72,9 +72,9 @@ } catch (Exception e) { e.printStackTrace(); } - return new SpecificSparqlEndpoint(u, "localhost", m); + return new SparqlEndpoint(u, "localhost", m); } - public static SpecificSparqlEndpoint worldFactBook() { + public static SparqlEndpoint worldFactBook() { URL u = null; HashMap<String, String> m = new HashMap<String, String>(); // m.put("default-graph-uri", "http://dbpedia.org"); @@ -84,7 +84,7 @@ } catch (Exception e) { e.printStackTrace(); } - return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); + return new SparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); } /* @@ -102,7 +102,7 @@ return new SpecificSparqlEndpoint(u, "www4.wiwiss.fu-berlin.de", m); } */ - public static SpecificSparqlEndpoint govTrack() { + public static SparqlEndpoint govTrack() { URL u = 
null; HashMap<String, String> m = new HashMap<String, String>(); // m.put("default-graph-uri", "http://dbpedia.org"); @@ -112,9 +112,9 @@ } catch (Exception e) { e.printStackTrace(); } - return new SpecificSparqlEndpoint(u, "www.rdfabout.com", m); + return new SparqlEndpoint(u, "www.rdfabout.com", m); } - public static SpecificSparqlEndpoint revyu() { + public static SparqlEndpoint revyu() { URL u = null; HashMap<String, String> m = new HashMap<String, String>(); // m.put("default-graph-uri", "http://dbpedia.org"); @@ -125,7 +125,7 @@ } catch (Exception e) { e.printStackTrace(); } - return new SpecificSparqlEndpoint(u, "revyu.com", m); + return new SparqlEndpoint(u, "revyu.com", m); } // returns strange xml @@ -151,7 +151,7 @@ return new SpecificSparqlEndpoint(u, "dbtune.org", m); }*/ - public static SpecificSparqlEndpoint myopenlink() { + public static SparqlEndpoint myopenlink() { URL u = null; HashMap<String, String> m = new HashMap<String, String>(); m.put("default-graph-uri", "http://myopenlink.net/dataspace"); @@ -162,7 +162,7 @@ } catch (Exception e) { e.printStackTrace(); } - return new SpecificSparqlEndpoint(u, "myopenlink.net", m); + return new SparqlEndpoint(u, "myopenlink.net", m); } } Copied: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java (from rev 391, trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SpecificSparqlEndpoint.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SparqlEndpoint.java 2008-01-18 13:10:58 UTC (rev 392) @@ -0,0 +1,74 @@ +/** + * Copyright (C) 2007, Sebastian Hellmann + * + * This file is part of DL-Learner. 
+ * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.kb.sparql.configuration; + +import java.net.URL; +import java.util.HashMap; + +/** + * One sparql endpoint configuration. + * + * @author Sebastian Hellmann + * + */ +public class SparqlEndpoint { + + String host; + String hasQueryParameter; + URL url; + public HashMap<String, String> parameters = new HashMap<String, String>(); + + public SparqlEndpoint(URL url, String host, HashMap<String, String> parameters) { + super(); + this.host = host; + this.url = url; + this.hasQueryParameter = "query"; + this.parameters = parameters; + } + + public String getHasQueryParameter() { + return hasQueryParameter; + } + + public void setHasQueryParameter(String hasQueryParameter) { + this.hasQueryParameter = hasQueryParameter; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public HashMap<String, String> getParameters() { + return parameters; + } + + public void setParameters(HashMap<String, String> parameters) { + this.parameters = parameters; + } + + public URL getURL() { + return this.url; + } + +} Deleted: trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SpecificSparqlEndpoint.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SpecificSparqlEndpoint.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/configuration/SpecificSparqlEndpoint.java 2008-01-18 13:10:58 UTC (rev 392) @@ -1,74 +0,0 @@ -/** - * Copyright (C) 2007, Sebastian Hellmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. - * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ -package org.dllearner.kb.sparql.configuration; - -import java.net.URL; -import java.util.HashMap; - -/** - * One sparql endpoint configuration. 
- * - * @author Sebastian Hellmann - * - */ -public class SpecificSparqlEndpoint { - - String host; - String hasQueryParameter; - URL url; - public HashMap<String, String> parameters = new HashMap<String, String>(); - - public SpecificSparqlEndpoint(URL url, String host, HashMap<String, String> parameters) { - super(); - this.host = host; - this.url = url; - this.hasQueryParameter = "query"; - this.parameters = parameters; - } - - public String getHasQueryParameter() { - return hasQueryParameter; - } - - public void setHasQueryParameter(String hasQueryParameter) { - this.hasQueryParameter = hasQueryParameter; - } - - public String getHost() { - return host; - } - - public void setHost(String host) { - this.host = host; - } - - public HashMap<String, String> getParameters() { - return parameters; - } - - public void setParameters(HashMap<String, String> parameters) { - this.parameters = parameters; - } - - public URL getURL() { - return this.url; - } - -} Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/CachedSparqlQuery.java 2008-01-18 13:10:58 UTC (rev 392) @@ -2,7 +2,7 @@ import java.net.URI; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFormatter; @@ -14,7 +14,7 @@ SparqlQuery sparqlQuery; boolean debug_no_cache=false; - public CachedSparqlQuery(SpecificSparqlEndpoint endpoint,Cache c) { + public CachedSparqlQuery(SparqlEndpoint endpoint,Cache c) { //this.specificSparqlEndpoint=endpoint; this.sparqlQuery=new SparqlQuery(endpoint); this.cache=c; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/JenaTestScript.java 2008-01-18 13:10:58 UTC (rev 392) @@ -20,7 +20,7 @@ package org.dllearner.kb.sparql.query; import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQuery.java 2008-01-18 13:10:58 UTC (rev 392) @@ -22,7 +22,7 @@ import java.util.ArrayList; import java.util.List; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; @@ -35,21 +35,58 @@ * Represents a SPARQL query. It includes support for stopping the SPARQL * query (which may be necessary if a timeout is reached). * - * TODO: It is probably good to change all SPARQL query calls to use only - * this class. - * - * TODO: Could we use Jena as a solid foundation here? 
(com.hp.jena.query) - * * @author Jens Lehmann * */ public class SparqlQuery extends SparqlQueryAbstract{ - public SparqlQuery(SpecificSparqlEndpoint endpoint) { + private boolean isRunning = false; + private String queryString; + private QueryExecution queryExecution; + + public SparqlQuery(SparqlEndpoint endpoint, String queryString) { super(endpoint); - // TODO Auto-generated constructor stub + this.queryString = queryString; } + + public ResultSet send() { + isRunning = true; + + p(queryString); + // create a query and parse it into Jena + Query query = QueryFactory.create(queryString); + // query.validate(); + // Jena access to DBpedia SPARQL endpoint + String service=specificSparqlEndpoint.getURL().toString(); + + // TODO: the graph uri should be a parameter of SparqlQuery + ArrayList<String> al=new ArrayList<String>(); + al.add("http://dbpedia.org"); + QueryExecution queryExecution = + QueryExecutionFactory.sparqlService(service, query, al, new ArrayList<String>()); + p("query SPARQL server"); + ResultSet rs = queryExecution.execSelect(); + isRunning = false; + return rs; + } + + public void stop() { + queryExecution.abort(); + isRunning = false; + } + public boolean isRunning() { + return isRunning; + } + + + + // CODE BY SEBASTIAN H. 
BELOW // + + public SparqlQuery(SparqlEndpoint endpoint) { + super(endpoint); + } + private ResultSet sendAndReceive(String queryString){ p(queryString); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryAbstract.java 2008-01-18 13:10:58 UTC (rev 392) @@ -1,33 +1,15 @@ package org.dllearner.kb.sparql.query; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; public abstract class SparqlQueryAbstract { - private boolean isRunning = false; boolean print_flag=false; - SpecificSparqlEndpoint specificSparqlEndpoint; + SparqlEndpoint specificSparqlEndpoint; - public SparqlQueryAbstract(SpecificSparqlEndpoint endpoint) { + public SparqlQueryAbstract(SparqlEndpoint endpoint) { this.specificSparqlEndpoint=endpoint; } - public void send() { - isRunning = true; - - // ... send query - // ... 
check periodically whether isRunning is still true, if not - // abort the query - } - - public void stop() { - isRunning = false; - } - - public boolean isRunning() { - return isRunning; - } - - public abstract String getAsXMLString(String queryString); //public abstract String getAsXMLString(String queryString); Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/SparqlQueryConventional.java 2008-01-18 13:10:58 UTC (rev 392) @@ -11,7 +11,7 @@ import java.util.Iterator; import java.util.Set; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFormatter; @@ -22,7 +22,7 @@ - public SparqlQueryConventional(SpecificSparqlEndpoint specificSparqlEndpoint) { + public SparqlQueryConventional(SparqlEndpoint specificSparqlEndpoint) { super(specificSparqlEndpoint); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/query/TestSparqlQuery.java 2008-01-18 13:10:58 UTC (rev 392) @@ -20,7 +20,7 @@ package org.dllearner.kb.sparql.query; import org.dllearner.kb.sparql.configuration.PredefinedEndpoint; -import org.dllearner.kb.sparql.configuration.SpecificSparqlEndpoint; +import org.dllearner.kb.sparql.configuration.SparqlEndpoint; import com.hp.hpl.jena.query.Query; import com.hp.hpl.jena.query.QueryExecution; @@ -65,7 +65,7 @@ } public static void testTime(int howOften, String queryString){ - 
SpecificSparqlEndpoint sse= PredefinedEndpoint.dbpediaEndpoint(); + SparqlEndpoint sse= PredefinedEndpoint.dbpediaEndpoint(); SparqlQuery sqJena=new SparqlQuery(sse); SparqlQueryConventional sqConv=new SparqlQueryConventional(sse); @@ -97,7 +97,7 @@ } public static void compareResults( String queryString){ - SpecificSparqlEndpoint sse= PredefinedEndpoint.dbpediaEndpoint(); + SparqlEndpoint sse= PredefinedEndpoint.dbpediaEndpoint(); SparqlQuery sqJena=new SparqlQuery(sse); SparqlQueryConventional sqConv=new SparqlQueryConventional(sse); Modified: trunk/src/dl-learner/org/dllearner/server/ClientState.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/ClientState.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/server/ClientState.java 2008-01-18 13:10:58 UTC (rev 392) @@ -33,7 +33,7 @@ import org.dllearner.core.ReasonerComponent; import org.dllearner.core.ReasoningService; import org.dllearner.kb.OWLFile; -import org.dllearner.kb.SparqlKnowledgeSource; +import org.dllearner.kb.sparql.SparqlKnowledgeSource; /** * Stores the state of a DL-Learner client session. Modified: trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java 2008-01-18 11:59:56 UTC (rev 391) +++ trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java 2008-01-18 13:10:58 UTC (rev 392) @@ -46,7 +46,7 @@ import org.dllearner.core.dl.Concept; import org.dllearner.core.dl.Individual; import org.dllearner.kb.OWLFile; -import org.dllearner.kb.SparqlKnowledgeSource; +import org.dllearner.kb.sparql.SparqlKnowledgeSource; import org.dllearner.learningproblems.PosNegDefin... [truncated message content] |
From: <jen...@us...> - 2008-01-19 10:16:34
|
Revision: 399 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=399&view=rev Author: jenslehmann Date: 2008-01-19 02:16:30 -0800 (Sat, 19 Jan 2008) Log Message: ----------- - added JUnit library for unit tests (please update your classpath) - implemented first unit test - added missing getName() methods Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/algorithms/BruteForceLearner.java trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java trunk/src/dl-learner/org/dllearner/algorithms/gp/GP.java trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java Added Paths: ----------- trunk/lib/junit-4.4.jar trunk/src/dl-learner/org/dllearner/test/ trunk/src/dl-learner/org/dllearner/test/AllTestsRunner.java trunk/src/dl-learner/org/dllearner/test/ComponentTests.java trunk/src/dl-learner/org/dllearner/test/package.html Added: trunk/lib/junit-4.4.jar =================================================================== (Binary files differ) Property changes on: trunk/lib/junit-4.4.jar ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Modified: trunk/src/dl-learner/org/dllearner/algorithms/BruteForceLearner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/BruteForceLearner.java 2008-01-19 08:11:27 UTC (rev 398) +++ trunk/src/dl-learner/org/dllearner/algorithms/BruteForceLearner.java 2008-01-19 10:16:30 UTC (rev 399) @@ -72,6 +72,10 @@ this.learningProblem = learningProblem; } + public static String getName() { + return "brute force learning algorithm"; + } + public static Collection<Class<? extends LearningProblem>> supportedLearningProblems() { Collection<Class<? extends LearningProblem>> problems = new LinkedList<Class<? 
extends LearningProblem>>(); problems.add(LearningProblem.class); Modified: trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java 2008-01-19 08:11:27 UTC (rev 398) +++ trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java 2008-01-19 10:16:30 UTC (rev 399) @@ -47,6 +47,10 @@ this.learningProblem = learningProblem; } + public static String getName() { + return "random guesser learning algorithm"; + } + public static Collection<Class<? extends LearningProblem>> supportedLearningProblems() { Collection<Class<? extends LearningProblem>> problems = new LinkedList<Class<? extends LearningProblem>>(); problems.add(LearningProblem.class); Modified: trunk/src/dl-learner/org/dllearner/algorithms/gp/GP.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/gp/GP.java 2008-01-19 08:11:27 UTC (rev 398) +++ trunk/src/dl-learner/org/dllearner/algorithms/gp/GP.java 2008-01-19 10:16:30 UTC (rev 399) @@ -140,7 +140,11 @@ this.learningProblem = learningProblem; this.rs = rs; } - + + public static String getName() { + return "genetic programming learning algorithm"; + } + public static Collection<Class<? extends LearningProblem>> supportedLearningProblems() { Collection<Class<? extends LearningProblem>> problems = new LinkedList<Class<? 
extends LearningProblem>>(); problems.add(PosNegLP.class); Modified: trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java 2008-01-19 08:11:27 UTC (rev 398) +++ trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java 2008-01-19 10:16:30 UTC (rev 399) @@ -83,6 +83,10 @@ // hier muss nichts getan werden } + public static String getName() { + return "fast retrieval reasoner"; + } + /* (non-Javadoc) * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) */ Modified: trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java 2008-01-19 08:11:27 UTC (rev 398) +++ trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java 2008-01-19 10:16:30 UTC (rev 399) @@ -244,6 +244,10 @@ } */ + public static String getName() { + return "KAON2 reasoner"; + } + // TODO: hier werden momentan keine allowed concepts berücksichtigt // (benötigt rekursive Aufrufe, da ein erlaubtes Konzept von einem nicht // erlaubten verdeckt werden könnte) Added: trunk/src/dl-learner/org/dllearner/test/AllTestsRunner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/test/AllTestsRunner.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/test/AllTestsRunner.java 2008-01-19 10:16:30 UTC (rev 399) @@ -0,0 +1,39 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. 
+ * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.test; + +import org.junit.runner.JUnitCore; + +/** + * Class designed to run all DL-Learner component tests. Note, + * that in Eclipse (and similar in other IDEs) you can run + * JUnit tests by clicking on a file containing methods annotated + * with @Test and "Run As JUnit Test". + * + * @author Jens Lehmann + * + */ +public class AllTestsRunner { + + public static void main(String[] args) { + JUnitCore.main("org.dllearner.test.ComponentTests"); + } + +} Added: trunk/src/dl-learner/org/dllearner/test/ComponentTests.java =================================================================== --- trunk/src/dl-learner/org/dllearner/test/ComponentTests.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/test/ComponentTests.java 2008-01-19 10:16:30 UTC (rev 399) @@ -0,0 +1,54 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.test; + +import java.util.List; + +import org.dllearner.core.Component; +import org.dllearner.core.ComponentManager; +import org.junit.Test; +import static org.junit.Assert.*; + +/** + * A suite of JUnit tests related to the DL-Learner component architecture. + * + * @author Jens Lehmann + * + */ +public class ComponentTests { + + /** + * Checks whether all components implement the getName() method. While it + * cannot be enforced to implement a static method, it should be done (e.g. + * to be used as label for the component in GUIs). + */ + @Test + public void nameTest() { + String defaultName = Component.getName(); + ComponentManager cm = ComponentManager.getInstance(); + List<Class<? extends Component>> components = cm.getComponents(); + for (Class<? extends Component> component : components) { + String componentName = cm.getComponentName(component); + assertFalse(component + " does not overwrite getName().", componentName + .equals(defaultName)); + } + } + +} Added: trunk/src/dl-learner/org/dllearner/test/package.html =================================================================== --- trunk/src/dl-learner/org/dllearner/test/package.html (rev 0) +++ trunk/src/dl-learner/org/dllearner/test/package.html 2008-01-19 10:16:30 UTC (rev 399) @@ -0,0 +1,7 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"> +<html> +<head></head> +<body bgcolor="white"> +<p>DL-Learner JUnit tests.</p> +</body> +</html> \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-01-19 14:32:51
|
Revision: 400 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=400&view=rev Author: jenslehmann Date: 2008-01-19 06:32:48 -0800 (Sat, 19 Jan 2008) Log Message: ----------- - added log4j logging framework (update your classpath) - added logging initialisation for all interfaces: CLI, GUI, Web-Service; consider using log statements instead of System.out.println in the future - some smaller fixes Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java trunk/src/dl-learner/org/dllearner/cli/Start.java trunk/src/dl-learner/org/dllearner/gui/StartGUI.java trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java Added Paths: ----------- trunk/lib/log4j.jar Added: trunk/lib/log4j.jar =================================================================== (Binary files differ) Property changes on: trunk/lib/log4j.jar ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Modified: trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java 2008-01-19 10:16:30 UTC (rev 399) +++ trunk/src/dl-learner/org/dllearner/algorithms/RandomGuesser.java 2008-01-19 14:32:48 UTC (rev 400) @@ -22,6 +22,7 @@ import java.util.Collection; import java.util.LinkedList; +import org.apache.log4j.Logger; import org.dllearner.algorithms.gp.Program; import org.dllearner.algorithms.gp.GPUtilities; import org.dllearner.core.LearningAlgorithm; @@ -43,6 +44,8 @@ private int numberOfTrees; private int maxDepth; + private static Logger logger = Logger.getLogger(RandomGuesser.class); + public RandomGuesser(LearningProblem learningProblem, ReasoningService rs) { this.learningProblem = learningProblem; } @@ -103,12 +106,9 @@ } } - 
System.out.print("Random-Guesser (" + numberOfTrees + " trials, "); - System.out.println("maximum depth " + maxDepth + ")"); - System.out.println("best solution: " + bestDefinition); - System.out.println("fitness: " + bestFitness); - - // System.out.println(bestScore); + logger.info("Random-Guesser (" + numberOfTrees + " trials, maximum depth " + maxDepth + ")"); + logger.info("best solution: " + bestDefinition); + logger.info("fitness: " + bestFitness); } @Override Modified: trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-19 10:16:30 UTC (rev 399) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-19 14:32:48 UTC (rev 400) @@ -34,6 +34,12 @@ import java.util.SortedSet; import java.util.TreeSet; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; +import org.dllearner.algorithms.BruteForceLearner; +import org.dllearner.algorithms.RandomGuesser; import org.dllearner.algorithms.gp.GP; import org.dllearner.algorithms.refinement.ROLearner; import org.dllearner.core.Component; @@ -92,11 +98,20 @@ public static void main(String[] args) { File file = new File(args[args.length - 1]); String baseDir = file.getParentFile().getPath(); - + boolean inQueryMode = false; if (args.length > 1 && args[0].equals("-q")) inQueryMode = true; + // create logger (a simple logger which outputs + // its messages to the console) + SimpleLayout layout = new SimpleLayout(); + ConsoleAppender consoleAppender = new ConsoleAppender(layout); + Logger logger = Logger.getRootLogger(); + logger.removeAllAppenders(); + logger.addAppender(consoleAppender); + logger.setLevel(Level.INFO); + // create component manager instance System.out.print("starting component manager ... 
"); long cmStartTime = System.nanoTime(); @@ -173,19 +188,16 @@ laClass = ROLearner.class; else if(algorithmOption.getStringValue().equals("gp")) laClass = GP.class; + else if(algorithmOption.getStringValue().equals("bruteForce")) + laClass = BruteForceLearner.class; + else if(algorithmOption.getStringValue().equals("randomGuesser")) + laClass = RandomGuesser.class; else handleError("Unknown value in " + algorithmOption); la = cm.learningAlgorithm(laClass, lp, rs); configureComponent(cm, la, componentPrefixMapping, parser); initComponent(cm, la); - - // initialise all structures -// for (KnowledgeSource source : sources) -// initComponent(cm, source); -// initComponent(cm, reasoner); -// initComponent(cm, lp); -// initComponent(cm, la); // perform file exports performExports(parser, baseDir, sources, rs); Modified: trunk/src/dl-learner/org/dllearner/gui/StartGUI.java =================================================================== --- trunk/src/dl-learner/org/dllearner/gui/StartGUI.java 2008-01-19 10:16:30 UTC (rev 399) +++ trunk/src/dl-learner/org/dllearner/gui/StartGUI.java 2008-01-19 14:32:48 UTC (rev 400) @@ -22,6 +22,11 @@ import javax.swing.*; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; + /** * StartGUI * @@ -64,6 +69,14 @@ } public static void main(String[] args) { + // create GUI logger + SimpleLayout layout = new SimpleLayout(); + ConsoleAppender consoleAppender = new ConsoleAppender(layout); + Logger logger = Logger.getRootLogger(); + logger.removeAllAppenders(); + logger.addAppender(consoleAppender); + logger.setLevel(Level.INFO); + myrun = new StartGUI(); } Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-19 10:16:30 UTC (rev 399) +++ 
trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlKnowledgeSource.java 2008-01-19 14:32:48 UTC (rev 400) @@ -30,10 +30,12 @@ import java.util.Set; import java.util.Vector; +import org.apache.log4j.Logger; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.OntologyFormat; import org.dllearner.core.OntologyFormatUnsupportedException; import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.CommonConfigOptions; import org.dllearner.core.config.ConfigEntry; import org.dllearner.core.config.ConfigOption; import org.dllearner.core.config.IntegerConfigOption; @@ -61,7 +63,7 @@ // ConfigOptions private URL url; - String host; + // String host; private Set<String> instances = new HashSet<String>();; private URL dumpFile; private int recursionDepth = 1; @@ -82,7 +84,8 @@ private int numberOfInstancesUsedForRoleLearning = 40; private String role = ""; private String blankNodeIdentifier = "bnode"; - +// private String verbosity = "warning"; + //LinkedList<StringTuple> URIParameters = new LinkedList<StringTuple>(); LinkedList<StringTuple> replacePredicate = new LinkedList<StringTuple>(); LinkedList<StringTuple> replaceObject = new LinkedList<StringTuple>(); @@ -130,9 +133,11 @@ private KB kb; public static String getName() { - return "SPARQL Endpoint Restructured"; + return "SPARQL Endpoint"; } + private static Logger logger = Logger.getLogger(SparqlKnowledgeSource.class); + /** * sets the ConfigOptions for this KnowledgeSource * @@ -141,7 +146,7 @@ public static Collection<ConfigOption<?>> createConfigOptions() { Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); options.add(new StringConfigOption("url", "URL of SPARQL Endpoint")); - options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); +// options.add(new StringConfigOption("host", "host of SPARQL Endpoint")); options .add(new StringSetConfigOption("instances", "relevant instances e.g. 
positive and negative examples in a learning problem")); @@ -193,6 +198,7 @@ "numberOfInstancesUsedForRoleLearning", "")); options.add(new BooleanConfigOption("closeAfterRecursion", "gets all classes for all instances")); + options.add(CommonConfigOptions.getVerbosityOption()); return options; } @@ -213,8 +219,8 @@ throw new InvalidConfigOptionValueException(entry.getOption(), entry.getValue(), "malformed URL " + s); } - } else if (option.equals("host")) { - host = (String) entry.getValue(); +// } else if (option.equals("host")) { +// host = (String) entry.getValue(); } else if (option.equals("instances")) { instances = (Set<String>) entry.getValue(); } else if (option.equals("recursionDepth")) { @@ -257,6 +263,8 @@ numberOfInstancesUsedForRoleLearning = (Integer) entry.getValue(); } else if (option.equals("closeAfterRecursion")) { closeAfterRecursion = (Boolean) entry.getValue(); +// } else if (option.equals("verbosity")) { +// verbosity = (String) entry.getValue(); } } @@ -268,7 +276,7 @@ */ @Override public void init() { - System.out.println("SparqlModul: Collecting Ontology"); + logger.info("SparqlModul: Collecting Ontology"); // SparqlOntologyCollector oc= // new SparqlOntologyCollector(Datastructures.setToArray(instances), // numberOfRecursions, filterMode, @@ -327,7 +335,7 @@ break; } pos = tmp; - System.out.println("Instances used: " + pos.size()); + logger.info("Instances used: " + pos.size()); tmp = new HashSet<String>(); for (String one : neg) { @@ -343,10 +351,10 @@ instances.addAll(neg); for (String one : pos) { - System.out.println("+\"" + one + "\""); + logger.info("+\"" + one + "\""); } for (String one : neg) { - System.out.println("-\"" + one + "\""); + logger.info("-\"" + one + "\""); } /* @@ -365,12 +373,12 @@ } // the actual extraction is started here ont = m.extract(instances); - System.out.println("Number of cached SPARQL queries: " + logger.info("Number of cached SPARQL queries: " + m.getConfiguration().numberOfCachedSparqlQueries); - 
System.out.println("Number of uncached SPARQL queries: " + logger.info("Number of uncached SPARQL queries: " + m.getConfiguration().numberOfUncachedSparqlQueries); - System.out.println("Finished collecting Fragment"); + logger.info("Finished collecting Fragment"); if (dumpToFile) { String filename = System.currentTimeMillis() + ".nt"; @@ -401,7 +409,7 @@ } catch (Exception e) { e.printStackTrace(); } - System.out.println("SparqlModul: ****Finished"); + logger.info("SparqlModul: ****Finished"); } /* @@ -446,7 +454,7 @@ * @param limit */ public void calculateSubjects(String label, int limit) { - System.out.println("SparqlModul: Collecting Subjects"); + logger.info("SparqlModul: Collecting Subjects"); // oldSparqlOntologyCollector oc = new oldSparqlOntologyCollector(url); // try { Vector<String> v = (SparqlQuery.makeLabelQuery(label, limit, sse) @@ -458,7 +466,7 @@ // subjects = new String[1]; // subjects[0] = "[Error]Sparql Endpoint could not be reached."; // } - System.out.println("SparqlModul: ****Finished"); + logger.info("SparqlModul: ****Finished"); } /** @@ -467,7 +475,7 @@ * @param subject */ public void calculateTriples(String subject) { - System.out.println("SparqlModul: Collecting Triples"); + logger.info("SparqlModul: Collecting Triples"); Vector<StringTuple> v = (SparqlQuery.makeArticleQuery(subject, sse) .getAsVectorOfTupels("predicate", "objcet")); //String[] subjects = (String[]) v.toArray(new String[v.size()]); @@ -484,7 +492,7 @@ // triples = new String[1]; // triples[0] = "[Error]Sparql Endpoint could not be reached."; //} - System.out.println("SparqlModul: ****Finished"); + logger.info("SparqlModul: ****Finished"); } /** @@ -493,7 +501,7 @@ * @param concept */ public void calculateConceptSubjects(String concept) { - System.out.println("SparqlModul: Collecting Subjects"); + logger.info("SparqlModul: Collecting Subjects"); Vector<String> v = (SparqlQuery.makeConceptQuery(concept, sse) .getAsVector("subject")); conceptSubjects = (String[]) 
v.toArray(new String[v.size()]); @@ -506,7 +514,7 @@ // conceptSubjects = new String[1]; // conceptSubjects[0] = "[Error]Sparql Endpoint could not be reached."; // } - System.out.println("SparqlModul: ****Finished"); + logger.info("SparqlModul: ****Finished"); } public boolean subjectThreadIsRunning() { Modified: trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java 2008-01-19 10:16:30 UTC (rev 399) +++ trunk/src/dl-learner/org/dllearner/server/DLLearnerWS.java 2008-01-19 14:32:48 UTC (rev 400) @@ -1,5 +1,5 @@ /** - * Copyright (C) 2007, Jens Lehmann + * Copyright (C) 2007-2008, Jens Lehmann * * This file is part of DL-Learner. * Modified: trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java 2008-01-19 10:16:30 UTC (rev 399) +++ trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java 2008-01-19 14:32:48 UTC (rev 400) @@ -1,7 +1,24 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ package org.dllearner.server; - - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; @@ -11,61 +28,78 @@ import javax.xml.ws.Endpoint; +import org.apache.log4j.ConsoleAppender; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; +import org.apache.log4j.SimpleLayout; + import com.sun.net.httpserver.HttpContext; import com.sun.net.httpserver.HttpServer; - +/** + * Starts the DL-Learner web service. + * + * @author Jens Lehmann + * @author Sebastian Hellmann + * + */ public class DLLearnerWSStart { public static void main(String[] args) { - //String url = "http://139.18.114.78:8181/services"; - /*String url=""; - if (args.length > 0) - url = args[0];*/ - try{ - - InetSocketAddress isa=new InetSocketAddress("localhost",8181); - HttpServer server = HttpServer.create(isa, 5); - ExecutorService threads = Executors.newFixedThreadPool(5); - server.setExecutor(threads); - server.start(); - - System.out.print("Starting DL-Learner web service at http://" + - isa.getHostName()+":"+isa.getPort()+ "/services ... "); - Endpoint endpoint = Endpoint.create(new DLLearnerWS()); - //Endpoint endpoint = Endpoint.create(new CustomDataClass()); + + // create web service logger + SimpleLayout layout = new SimpleLayout(); + ConsoleAppender consoleAppender = new ConsoleAppender(layout); + Logger logger = Logger.getRootLogger(); + logger.removeAllAppenders(); + logger.addAppender(consoleAppender); + logger.setLevel(Level.INFO); + + InetSocketAddress isa = new InetSocketAddress("localhost", 8181); + HttpServer server = null; + try { + server = HttpServer.create(isa, 0); + } catch (IOException e1) { + e1.printStackTrace(); + } + ExecutorService threads = Executors.newFixedThreadPool(10); + server.setExecutor(threads); + server.start(); + + System.out.println("Starting DL-Learner web service at http://" + isa.getHostName() + ":" + + isa.getPort() + "/services ... 
"); + Endpoint endpoint = Endpoint.create(new DLLearnerWS()); + // Endpoint endpoint = Endpoint.create(new CustomDataClass()); HttpContext context = server.createContext("/services"); - endpoint.publish(context); - //Endpoint endpoint = Endpoint.publish(url, new DLLearnerWS()); - + endpoint.publish(context); + // Endpoint endpoint = Endpoint.publish(url, new DLLearnerWS()); + System.out.println("OK."); - - System.out.println("Type \"exit\" to terminate web service."); boolean terminate = false; String inputString = ""; do { BufferedReader input = new BufferedReader(new InputStreamReader(System.in)); - + try { inputString = input.readLine(); } catch (IOException e) { e.printStackTrace(); } - + if (inputString.equals("exit")) terminate = true; - + } while (!terminate); System.out.print("Stopping web service ... "); endpoint.stop(); - - server.stop(1); - threads.shutdown(); + + server.stop(1); + threads.shutdown(); System.out.println("OK."); - }catch (Exception e) {e.printStackTrace();} + } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-01-21 18:29:48
|
Revision: 406 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=406&view=rev Author: jenslehmann Date: 2008-01-21 10:29:46 -0800 (Mon, 21 Jan 2008) Log Message: ----------- created DBpedia Navigator logo Modified Paths: -------------- trunk/src/dbpedia-navigator/index.php Added Paths: ----------- trunk/resources/logos/dbpedia_navigator.png trunk/resources/logos/dbpedia_navigator.svg trunk/src/dbpedia-navigator/images/dbpedia_navigator.png Added: trunk/resources/logos/dbpedia_navigator.png =================================================================== (Binary files differ) Property changes on: trunk/resources/logos/dbpedia_navigator.png ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/resources/logos/dbpedia_navigator.svg =================================================================== --- trunk/resources/logos/dbpedia_navigator.svg (rev 0) +++ trunk/resources/logos/dbpedia_navigator.svg 2008-01-21 18:29:46 UTC (rev 406) @@ -0,0 +1,1167 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<!-- Generator: Adobe Illustrator 12.0.1, SVG Export Plug-In . 
SVG Version: 6.00 Build 51448) --> +<svg + xmlns:dc="http://purl.org/dc/elements/1.1/" + xmlns:cc="http://web.resource.org/cc/" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:svg="http://www.w3.org/2000/svg" + xmlns="http://www.w3.org/2000/svg" + xmlns:xlink="http://www.w3.org/1999/xlink" + xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" + xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + version="1.1" + id="Layer_1" + width="262.551" + height="162.048" + viewBox="0 0 262.551 162.048" + overflow="visible" + enable-background="new 0 0 262.551 162.048" + xml:space="preserve" + sodipodi:version="0.32" + inkscape:version="0.45.1" + sodipodi:docname="dbpedia_navigator.svg" + sodipodi:docbase="/home/jl/promotion/dl-learner-svn/trunk/resources/logos" + inkscape:output_extension="org.inkscape.output.svg.inkscape" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/dbpedia_navigator.png" + inkscape:export-xdpi="45.850941" + inkscape:export-ydpi="45.850941"><metadata + id="metadata163"><rdf:RDF><cc:Work + rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type + rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs + id="defs161"><linearGradient + id="linearGradient4745"><stop + style="stop-color:#d27333;stop-opacity:1;" + offset="0" + id="stop4747" /><stop + id="stop4759" + offset="0.5" + style="stop-color:#eed619;stop-opacity:0.79591835;" /><stop + style="stop-color:#c87137;stop-opacity:0;" + offset="1" + id="stop4749" /></linearGradient><linearGradient + inkscape:collect="always" + id="linearGradient3726"><stop + style="stop-color:#004563;stop-opacity:1;" + offset="0" + id="stop3728" /><stop + style="stop-color:#004563;stop-opacity:0;" + offset="1" + id="stop3730" /></linearGradient><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2315" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + 
r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2317" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2319" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2321" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2323" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2325" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2327" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2329" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /><radialGradient + inkscape:collect="always" + xlink:href="#XMLID_18_" + id="radialGradient2331" + gradientUnits="userSpaceOnUse" + cx="131.353" + cy="59.6313" + r="55.6664" /> + + + + + +<linearGradient + y2="275.91718" + x2="136.23027" + y1="270.16718" + x1="133.39209" + gradientTransform="matrix(4.51801,0,0,4.51801,-375.76052,-663.87442)" + gradientUnits="userSpaceOnUse" + id="linearGradient2425" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="250.79219" + x2="140.48027" + y1="241.91719" + x1="158.39209" + gradientTransform="matrix(4.51801,0,0,4.51801,-375.76052,-573.51422)" + gradientUnits="userSpaceOnUse" + id="linearGradient2422" + xlink:href="#linearGradient3757" + inkscape:collect="always" 
/><linearGradient + y2="264.87277" + x2="288.16391" + y1="246.48801" + x1="279.02023" + gradientUnits="userSpaceOnUse" + id="linearGradient2389" + xlink:href="#linearGradient3584" + inkscape:collect="always" /><linearGradient + y2="227.07349" + x2="299.00925" + y1="249.7009" + x1="265.45667" + gradientUnits="userSpaceOnUse" + id="linearGradient2387" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="241.04185" + x2="247.02332" + y1="224.07129" + x1="238.06587" + gradientTransform="translate(38.53733,19.09189)" + gradientUnits="userSpaceOnUse" + id="linearGradient2385" + xlink:href="#linearGradient3561" + inkscape:collect="always" /><radialGradient + r="14.931247" + fy="254.06732" + fx="281.73419" + cy="254.06732" + cx="281.73419" + gradientTransform="matrix(1.307824,-0.739795,0.364242,0.643915,-178.7373,298.2997)" + gradientUnits="userSpaceOnUse" + id="radialGradient2383" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="237.95193" + x2="281.4285" + y1="278.77133" + x1="281.4285" + gradientTransform="matrix(3.4336876,0,0,3.4444224,-552.00629,-369.97815)" + gradientUnits="userSpaceOnUse" + id="linearGradient2376" + xlink:href="#linearGradient3628" + inkscape:collect="always" /><linearGradient + y2="264.87277" + x2="288.16391" + y1="246.48801" + x1="279.02023" + gradientUnits="userSpaceOnUse" + id="linearGradient2346" + xlink:href="#linearGradient3584" + inkscape:collect="always" /><linearGradient + y2="227.07349" + x2="299.00925" + y1="249.7009" + x1="265.45667" + gradientUnits="userSpaceOnUse" + id="linearGradient2344" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="241.04185" + x2="247.02332" + y1="224.07129" + x1="238.06587" + gradientTransform="translate(38.53733,19.09189)" + gradientUnits="userSpaceOnUse" + id="linearGradient2342" + xlink:href="#linearGradient3561" + inkscape:collect="always" /><radialGradient + r="14.931247" + fy="254.06732" + 
fx="281.73419" + cy="254.06732" + cx="281.73419" + gradientTransform="matrix(1.307824,-0.739795,0.364242,0.643915,-178.7373,298.2997)" + gradientUnits="userSpaceOnUse" + id="radialGradient2340" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="275.91718" + x2="136.23027" + y1="270.16718" + x1="133.39209" + gradientTransform="matrix(4.40658,0,0,4.40658,-365.3857,-860.3161)" + gradientUnits="userSpaceOnUse" + id="linearGradient2318" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="250.79219" + x2="140.48027" + y1="241.91719" + x1="158.39209" + gradientTransform="matrix(4.40658,0,0,4.40658,-365.3857,-772.1845)" + gradientUnits="userSpaceOnUse" + id="linearGradient2315" + xlink:href="#linearGradient3757" + inkscape:collect="always" /><linearGradient + y2="274.99829" + x2="211.88393" + y1="265.5892" + x1="211.88393" + gradientTransform="matrix(4.40658,0,0,4.40658,-535.2818,-853.5334)" + gradientUnits="userSpaceOnUse" + id="linearGradient2298" + xlink:href="#linearGradient3916" + inkscape:collect="always" /><linearGradient + y2="264.87277" + x2="288.16391" + y1="246.48801" + x1="279.02023" + gradientUnits="userSpaceOnUse" + id="linearGradient2044" + xlink:href="#linearGradient3584" + inkscape:collect="always" /><linearGradient + y2="227.07349" + x2="299.00925" + y1="249.7009" + x1="265.45667" + gradientUnits="userSpaceOnUse" + id="linearGradient2042" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="241.04185" + x2="247.02332" + y1="224.07129" + x1="238.06587" + gradientTransform="translate(38.53733,19.09189)" + gradientUnits="userSpaceOnUse" + id="linearGradient2040" + xlink:href="#linearGradient3561" + inkscape:collect="always" /><radialGradient + r="14.931247" + fy="254.06732" + fx="281.73419" + cy="254.06732" + cx="281.73419" + gradientTransform="matrix(1.307824,-0.739795,0.364242,0.643915,-178.7373,298.2997)" + gradientUnits="userSpaceOnUse" + 
id="radialGradient2038" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="274.99829" + x2="211.88393" + y1="265.5892" + x1="211.88393" + gradientTransform="matrix(4.40658,0,0,4.40658,-534.2818,-853.5334)" + gradientUnits="userSpaceOnUse" + id="linearGradient2036" + xlink:href="#linearGradient3916" + inkscape:collect="always" /><linearGradient + y2="246.54927" + x2="148.19162" + y1="253.29927" + x1="133.59119" + gradientTransform="matrix(1.108571,0,0,1.108571,-17.31497,-6.035963)" + gradientUnits="userSpaceOnUse" + id="linearGradient2009" + xlink:href="#linearGradient3776" + inkscape:collect="always" /><linearGradient + y2="250.79219" + x2="140.48027" + y1="241.91719" + x1="158.39209" + gradientTransform="translate(0,20)" + gradientUnits="userSpaceOnUse" + id="linearGradient2007" + xlink:href="#linearGradient3757" + inkscape:collect="always" /><linearGradient + y2="275.91718" + x2="136.23027" + y1="270.16718" + x1="133.39209" + gradientUnits="userSpaceOnUse" + id="linearGradient2005" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="246.54927" + x2="148.19162" + y1="253.29927" + x1="133.59119" + gradientTransform="matrix(1.108571,0,0,1.108571,-17.31497,-6.035963)" + gradientUnits="userSpaceOnUse" + id="linearGradient1993" + xlink:href="#linearGradient3776" + inkscape:collect="always" /><linearGradient + y2="250.79219" + x2="140.48027" + y1="241.91719" + x1="158.39209" + gradientTransform="translate(0,20)" + gradientUnits="userSpaceOnUse" + id="linearGradient1991" + xlink:href="#linearGradient3757" + inkscape:collect="always" /><linearGradient + y2="275.91718" + x2="136.23027" + y1="270.16718" + x1="133.39209" + gradientUnits="userSpaceOnUse" + id="linearGradient1989" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="264.87277" + x2="288.16391" + y1="246.48801" + x1="279.02023" + gradientUnits="userSpaceOnUse" + id="linearGradient1987" + 
xlink:href="#linearGradient3584" + inkscape:collect="always" /><linearGradient + y2="227.07349" + x2="299.00925" + y1="249.7009" + x1="265.45667" + gradientUnits="userSpaceOnUse" + id="linearGradient1985" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="241.04185" + x2="247.02332" + y1="224.07129" + x1="238.06587" + gradientTransform="translate(38.53733,19.09189)" + gradientUnits="userSpaceOnUse" + id="linearGradient1983" + xlink:href="#linearGradient3561" + inkscape:collect="always" /><radialGradient + r="14.931247" + fy="254.06732" + fx="281.73419" + cy="254.06732" + cx="281.73419" + gradientTransform="matrix(1.307824,-0.739795,0.364242,0.643915,-178.7373,298.2997)" + gradientUnits="userSpaceOnUse" + id="radialGradient1981" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="274.99829" + x2="211.88393" + y1="265.5892" + x1="211.88393" + gradientTransform="matrix(4.40658,0,0,4.40658,-534.2818,-853.5334)" + gradientUnits="userSpaceOnUse" + id="linearGradient1979" + xlink:href="#linearGradient3916" + inkscape:collect="always" /><linearGradient + y2="274.99829" + x2="211.88393" + y1="265.5892" + x1="211.88393" + gradientTransform="matrix(4.40658,0,0,4.40658,-534.2818,-853.5334)" + gradientUnits="userSpaceOnUse" + id="linearGradient3029" + xlink:href="#linearGradient3916" + inkscape:collect="always" /><linearGradient + y2="246.54927" + x2="148.19162" + y1="253.29927" + x1="133.59119" + gradientTransform="matrix(1.108571,0,0,1.108571,-17.31497,-6.035963)" + gradientUnits="userSpaceOnUse" + id="linearGradient3015" + xlink:href="#linearGradient3776" + inkscape:collect="always" /><linearGradient + y2="250.79219" + x2="140.48027" + y1="241.91719" + x1="158.39209" + gradientTransform="translate(0,20)" + gradientUnits="userSpaceOnUse" + id="linearGradient3013" + xlink:href="#linearGradient3757" + inkscape:collect="always" /><linearGradient + y2="275.91718" + x2="136.23027" + y1="270.16718" + 
x1="133.39209" + gradientUnits="userSpaceOnUse" + id="linearGradient3011" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="264.87277" + x2="288.16391" + y1="246.48801" + x1="279.02023" + gradientUnits="userSpaceOnUse" + id="linearGradient3009" + xlink:href="#linearGradient3584" + inkscape:collect="always" /><linearGradient + y2="227.07349" + x2="299.00925" + y1="249.7009" + x1="265.45667" + gradientUnits="userSpaceOnUse" + id="linearGradient3007" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="241.04185" + x2="247.02332" + y1="224.07129" + x1="238.06587" + gradientTransform="translate(38.53733,19.09189)" + gradientUnits="userSpaceOnUse" + id="linearGradient3005" + xlink:href="#linearGradient3561" + inkscape:collect="always" /><radialGradient + r="14.931247" + fy="254.06732" + fx="281.73419" + cy="254.06732" + cx="281.73419" + gradientTransform="matrix(1.307824,-0.739795,0.364242,0.643915,-178.7373,298.2997)" + gradientUnits="userSpaceOnUse" + id="radialGradient3003" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="294.29398" + x2="211.20313" + y1="253.56172" + x1="211.20313" + gradientTransform="translate(-38.55509,1.539214)" + gradientUnits="userSpaceOnUse" + id="linearGradient3001" + xlink:href="#linearGradient3687" + inkscape:collect="always" /><linearGradient + y2="246.54927" + x2="148.19162" + y1="253.29927" + x1="133.59119" + gradientTransform="matrix(1.108571,0,0,1.108571,-17.31497,-6.035963)" + gradientUnits="userSpaceOnUse" + id="linearGradient3859" + xlink:href="#linearGradient3776" + inkscape:collect="always" /><linearGradient + y2="250.79219" + x2="140.48027" + y1="241.91719" + x1="158.39209" + gradientTransform="translate(0,20)" + gradientUnits="userSpaceOnUse" + id="linearGradient3857" + xlink:href="#linearGradient3757" + inkscape:collect="always" /><linearGradient + y2="275.91718" + x2="136.23027" + y1="270.16718" + x1="133.39209" + 
gradientUnits="userSpaceOnUse" + id="linearGradient3855" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="264.87277" + x2="288.16391" + y1="246.48801" + x1="279.02023" + gradientUnits="userSpaceOnUse" + id="linearGradient3853" + xlink:href="#linearGradient3584" + inkscape:collect="always" /><linearGradient + y2="227.07349" + x2="299.00925" + y1="249.7009" + x1="265.45667" + gradientUnits="userSpaceOnUse" + id="linearGradient3851" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="241.04185" + x2="247.02332" + y1="224.07129" + x1="238.06587" + gradientTransform="translate(38.53733,19.09189)" + gradientUnits="userSpaceOnUse" + id="linearGradient3849" + xlink:href="#linearGradient3561" + inkscape:collect="always" /><radialGradient + r="14.931247" + fy="254.06732" + fx="281.73419" + cy="254.06732" + cx="281.73419" + gradientTransform="matrix(1.307824,-0.739795,0.364242,0.643915,-178.7373,298.2997)" + gradientUnits="userSpaceOnUse" + id="radialGradient3847" + xlink:href="#linearGradient3440" + inkscape:collect="always" /><linearGradient + y2="294.29398" + x2="211.20313" + y1="253.56172" + x1="211.20313" + gradientTransform="translate(-38.55509,1.539214)" + gradientUnits="userSpaceOnUse" + id="linearGradient3845" + xlink:href="#linearGradient3687" + inkscape:collect="always" /><linearGradient + id="linearGradient3440"><stop + style="stop-color:black;stop-opacity:1;" + offset="0" + id="stop3442" /><stop + id="stop3452" + offset="0.3125" + style="stop-color:black;stop-opacity:1;" /><stop + id="stop3446" + offset="0.53727454" + style="stop-color:white;stop-opacity:1;" /><stop + style="stop-color:white;stop-opacity:1;" + offset="0.60522962" + id="stop3542" /><stop + style="stop-color:black;stop-opacity:1;" + offset="0.6964286" + id="stop3448" /><stop + style="stop-color:black;stop-opacity:1;" + offset="1" + id="stop3444" /></linearGradient><linearGradient + id="linearGradient3555"><stop + 
style="stop-color:#319328;stop-opacity:1;" + offset="0" + id="stop3557" /><stop + style="stop-color:black;stop-opacity:0;" + offset="1" + id="stop3559" /></linearGradient><linearGradient + id="linearGradient3561" + inkscape:collect="always"><stop + id="stop3563" + offset="0" + style="stop-color:#cbecff;stop-opacity:1;" /><stop + id="stop3565" + offset="1" + style="stop-color:#cbecff;stop-opacity:0;" /></linearGradient><linearGradient + id="linearGradient3584"><stop + id="stop3586" + offset="0" + style="stop-color:white;stop-opacity:1;" /><stop + style="stop-color:white;stop-opacity:0.49803922;" + offset="1" + id="stop3592" /><stop + id="stop3588" + offset="1" + style="stop-color:white;stop-opacity:0;" /></linearGradient><linearGradient + id="linearGradient3628"><stop + id="stop3630" + offset="0" + style="stop-color:white;stop-opacity:1;" /><stop + id="stop3632" + offset="1" + style="stop-color:#ffffff;stop-opacity:0.62962961;" /></linearGradient><linearGradient + id="linearGradient3687"><stop + style="stop-color:#8b969f;stop-opacity:1;" + offset="0" + id="stop3693" /><stop + id="stop3737" + offset="0" + style="stop-color:white;stop-opacity:1;" /><stop + id="stop3735" + offset="1" + style="stop-color:white;stop-opacity:1;" /></linearGradient><linearGradient + id="linearGradient3757"><stop + id="stop3759" + offset="0" + style="stop-color:black;stop-opacity:1;" /><stop + id="stop3761" + offset="1" + style="stop-color:black;stop-opacity:0;" /></linearGradient><linearGradient + id="linearGradient3776"><stop + style="stop-color:white;stop-opacity:1;" + offset="0" + id="stop3778" /><stop + id="stop3786" + offset="1" + style="stop-color:white;stop-opacity:0;" /></linearGradient><linearGradient + id="linearGradient3916"><stop + style="stop-color:#8b8b8b;stop-opacity:0.28703704;" + offset="0" + id="stop3918" /><stop + id="stop3924" + offset="0.44642857" + style="stop-color:#8d8d8d;stop-opacity:0.09259259;" /><stop + id="stop3922" + offset="1" + 
style="stop-color:#8f8f8f;stop-opacity:0;" /></linearGradient><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient3726" + id="linearGradient3732" + x1="403.47693" + y1="110.51495" + x2="404.33432" + y2="182.67908" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4751" + x1="27.008001" + y1="108.4607" + x2="26.615448" + y2="208.34743" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4761" + x1="77.506302" + y1="115.13101" + x2="79.262497" + y2="194.01152" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4763" + x1="119.31952" + y1="122.0713" + x2="119.31952" + y2="190.66307" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4765" + x1="159.87889" + y1="121.91031" + x2="159.87889" + y2="178.4985" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4767" + x1="199.39757" + y1="120.41837" + x2="199.39757" + y2="195.0119" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4769" + x1="226.55807" + y1="115.13101" + x2="226.55807" + y2="190.58194" + gradientUnits="userSpaceOnUse" /><linearGradient + inkscape:collect="always" + xlink:href="#linearGradient4745" + id="linearGradient4771" + x1="251.63084" + y1="121.91031" + x2="251.63084" + y2="190.50206" + gradientUnits="userSpaceOnUse" /></defs><sodipodi:namedview + inkscape:window-height="951" + inkscape:window-width="1280" + inkscape:pageshadow="2" + inkscape:pageopacity="0.0" + guidetolerance="10.0" + gridtolerance="10.0" + objecttolerance="10.0" + borderopacity="1.0" + bordercolor="#666666" + 
pagecolor="#ffffff" + id="base" + inkscape:zoom="1.1663211" + inkscape:cx="129.32206" + inkscape:cy="78.817407" + inkscape:window-x="0" + inkscape:window-y="0" + inkscape:current-layer="Layer_1" /> +<g + id="g94" + transform="translate(10.400001,0)" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"> + <path + style="fill:#004563" + id="path96" + d="M 184.007,65.5 C 182.079,65.529 179.952,65.49 177.702,65.451 C 163.736,65.202 143.662,64.867 131.859,80.32 C 120.055,64.867 99.982,65.203 86.014,65.451 C 83.764,65.49 81.638,65.529 79.711,65.5 C 79.249,65.495 74.856,65.636 66.651,65.918 C 66.651,65.918 67.207,76.222 74.456,76.222 C 77.059,76.222 78.941,76.161 79.636,76.156 C 81.659,76.183 83.866,76.144 86.203,76.104 C 105.263,75.764 120.382,76.828 127.162,93.468 L 128.514,96.786 L 131.618,96.786 L 132.094,96.786 L 135.2,96.786 L 136.553,93.468 C 143.332,76.828 158.45,75.763 177.511,76.104 C 179.849,76.145 182.056,76.184 184.079,76.156 C 184.772,76.161 186.656,76.222 189.258,76.222 C 196.509,76.222 197.064,65.918 197.064,65.918 C 188.861,65.636 184.467,65.496 184.007,65.5 z " /> + </g><g + id="g98" + transform="translate(10,0)" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"> + <radialGradient + gradientUnits="userSpaceOnUse" + r="63.037201" + cy="61.586399" + cx="130.28909" + id="XMLID_17_"> + <stop + id="stop101" + style="stop-color:#6D8896" + offset="0" /> + <stop + id="stop103" + style="stop-color:#4B7487" + offset="0.0435" /> + <stop + id="stop105" + style="stop-color:#286379" + offset="0.0933" /> + <stop + id="stop107" + style="stop-color:#00556C" + offset="0.1494" /> + <stop + id="stop109" + style="stop-color:#004962" + offset="0.2114" /> + <stop + id="stop111" + style="stop-color:#004059" + offset="0.2815" /> + <stop + id="stop113" + 
style="stop-color:#003951" + offset="0.3633" /> + <stop + id="stop115" + style="stop-color:#00344C" + offset="0.4644" /> + <stop + id="stop117" + style="stop-color:#003149" + offset="0.6062" /> + <stop + id="stop119" + style="stop-color:#003048" + offset="1" /> + </radialGradient> + <path + style="fill:url(#XMLID_17_)" + id="path121" + d="M 137.327,1.135 L 137.327,1.135 C 134.404,2.502 132.185,4.925 131.084,7.957 C 130.597,9.304 130.354,10.698 130.354,12.089 C 130.354,13.835 130.736,15.576 131.5,17.206 C 133.281,21.019 136.909,23.466 141.004,23.967 C 141.11,25.109 141.231,26.251 141.231,27.408 C 141.231,27.961 141.19,28.516 141.167,29.071 C 140.673,29.198 140.151,29.205 139.685,29.423 C 137.678,30.359 136.16,32.02 135.405,34.1 C 135.07,35.022 134.904,35.978 134.904,36.931 C 134.904,38.129 135.168,39.322 135.689,40.439 C 136.169,41.462 136.904,42.303 137.743,43.044 C 136.555,45.673 135.058,48.257 133.248,50.761 C 133.078,50.722 132.911,50.685 132.738,50.657 C 132.192,47.642 131.412,44.728 130.383,41.964 C 130.994,40.881 131.402,39.714 131.402,38.502 C 131.402,37.48 131.182,36.453 130.731,35.487 C 129.621,33.113 127.355,31.639 124.8,31.442 C 121.846,27.261 118.241,23.664 114.076,20.688 C 114.19,20.089 114.387,19.494 114.387,18.898 C 114.387,14.981 111.962,11.286 108.083,9.871 C 103.118,8.069 97.612,10.644 95.809,15.607 C 94.936,18.013 95.051,20.614 96.134,22.932 C 97.217,25.25 99.139,27.006 101.545,27.879 C 104.416,28.92 107.534,28.337 110.037,26.673 C 113.208,28.923 115.974,31.646 118.303,34.789 C 117.575,35.941 117.064,37.2 117.064,38.542 C 117.064,39.561 117.285,40.589 117.736,41.559 C 118.898,44.039 121.315,45.55 123.951,45.648 C 124.655,47.776 125.214,50.017 125.606,52.357 C 125.385,52.528 125.156,52.69 124.953,52.881 C 121.64,50.612 118.178,48.751 114.595,47.38 C 114.469,46.952 114.341,46.514 114.341,46.514 C 113.527,44.775 112.087,43.458 110.283,42.805 C 108.483,42.154 106.537,42.243 104.804,43.055 C 104.035,43.412 103.383,43.949 102.787,44.563 C 98.811,44.174 
94.823,44.363 90.879,45.015 C 88.29,41.157 83.26,39.578 78.934,41.604 C 74.143,43.846 72.069,49.565 74.309,54.353 C 76.552,59.142 82.271,61.214 87.058,58.978 C 89.887,57.655 91.748,55.091 92.32,52.105 C 95.217,51.623 98.156,51.437 101.115,51.66 C 101.208,51.961 101.212,52.294 101.345,52.58 C 103.02,56.163 107.294,57.717 110.874,56.045 C 111.652,55.679 112.314,55.155 112.905,54.554 C 116.028,55.862 119.087,57.591 122.028,59.759 C 122.045,61.07 122.327,62.363 122.887,63.547 C 123.927,65.781 125.783,67.481 128.109,68.325 C 130.437,69.171 132.953,69.059 135.195,68.011 C 137.674,66.852 139.401,64.681 140.121,62.095 C 145.344,61.342 150.215,59.909 154.64,57.799 C 156.662,59.017 159.13,59.285 161.313,58.264 C 163.885,57.059 165.428,54.493 165.428,51.777 C 165.428,51.451 165.315,51.121 165.269,50.791 C 167.486,48.82 169.509,46.62 171.325,44.228 C 173.703,44.813 176.183,44.71 178.436,43.654 C 181.042,42.434 183.015,40.273 183.993,37.573 C 186.018,31.994 183.127,25.808 177.544,23.775 C 171.964,21.756 165.773,24.647 163.742,30.225 C 163.31,31.418 163.1,32.653 163.1,33.88 C 163.1,36.176 163.865,38.417 165.249,40.288 C 163.912,42.019 162.423,43.638 160.783,45.122 C 158.99,44.438 157.016,44.442 155.242,45.274 C 152.822,46.402 151.316,48.792 151.181,51.392 C 147.577,53.042 143.601,54.179 139.328,54.807 C 141.601,51.609 143.48,48.291 144.891,44.892 C 145.501,44.764 146.134,44.706 146.7,44.442 C 148.707,43.505 150.227,41.841 150.982,39.759 C 151.317,38.837 151.483,37.883 151.483,36.93 C 151.483,35.732 151.22,34.541 150.698,33.424 C 150.156,32.265 149.295,31.331 148.301,30.539 C 148.371,29.507 148.442,28.473 148.442,27.435 C 148.442,25.856 148.288,24.28 148.126,22.706 C 150.765,21.3 152.792,19.038 153.817,16.207 C 154.306,14.862 154.548,13.469 154.548,12.081 C 154.548,10.334 154.165,8.595 153.403,6.968 C 150.579,0.932 143.368,-1.686 137.327,1.135 z " /> + </g><g + id="g123" + transform="translate(10,0)" + 
inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"> + <g + id="g125"> + <g + id="g127"> + <radialGradient + gradientUnits="userSpaceOnUse" + r="55.666401" + cy="59.631302" + cx="131.353" + id="XMLID_18_"> + <stop + id="stop130" + style="stop-color:#FFD528" + offset="0" /> + <stop + id="stop132" + style="stop-color:#FFD227" + offset="0.222" /> + <stop + id="stop134" + style="stop-color:#FFC925" + offset="0.4435" /> + <stop + id="stop136" + style="stop-color:#FDBB21" + offset="0.6647" /> + <stop + id="stop138" + style="stop-color:#FAA61D" + offset="0.8846" /> + <stop + id="stop140" + style="stop-color:#F8991D" + offset="1" /> + </radialGradient> + <path + style="fill:url(#radialGradient2315)" + id="path142" + d="M 128.544,38.833 C 128.716,36.449 126.922,34.378 124.538,34.207 C 122.153,34.036 120.084,35.83 119.913,38.214 C 119.741,40.598 121.536,42.669 123.924,42.838 C 126.303,43.011 128.374,41.217 128.544,38.833 z " /> + <circle + sodipodi:ry="6.6900001" + sodipodi:rx="6.6900001" + sodipodi:cy="59.631001" + sodipodi:cx="131.353" + style="fill:url(#radialGradient2317)" + id="circle144" + r="6.6900001" + cy="59.631001" + cx="131.353" /> + <circle + sodipodi:ry="6.493" + sodipodi:rx="6.493" + sodipodi:cy="50.263" + sodipodi:cx="82.981003" + style="fill:url(#radialGradient2319)" + id="circle146" + r="6.493" + cy="50.263" + cx="82.981003" /> + <path + style="fill:url(#radialGradient2321)" + id="path148" + d="M 111.45,19.487 C 111.707,15.91 109.013,12.803 105.436,12.546 C 101.861,12.293 98.754,14.981 98.499,18.558 C 98.242,22.135 100.932,25.243 104.513,25.499 C 108.087,25.755 111.193,23.063 111.45,19.487 z " /> + <path + style="fill:url(#radialGradient2323)" + id="path150" + d="M 108.147,45.254 C 105.775,45.083 103.714,46.866 103.542,49.24 C 103.374,51.612 105.156,53.673 107.531,53.844 C 109.903,54.013 111.965,52.228 112.136,49.856 C 112.303,47.484 110.52,45.422 
108.147,45.254 z " /> + <path + style="fill:url(#radialGradient2325)" + id="path152" + d="M 174.369,26.85 C 170.475,26.571 167.092,29.501 166.814,33.396 C 166.535,37.291 169.465,40.673 173.358,40.953 C 177.255,41.234 180.635,38.302 180.917,34.407 C 181.194,30.514 178.264,27.129 174.369,26.85 z " /> + <circle + sodipodi:ry="4.3270001" + sodipodi:rx="4.3270001" + sodipodi:cy="51.766998" + sodipodi:cx="158.276" + style="fill:url(#radialGradient2327)" + id="circle154" + r="4.3270001" + cy="51.766998" + cx="158.276" /> + <circle + sodipodi:ry="5.2259998" + sodipodi:rx="5.2259998" + sodipodi:cy="36.931999" + sodipodi:cx="143.19099" + style="fill:url(#radialGradient2329)" + id="circle156" + r="5.2259998" + cy="36.931999" + cx="143.19099" /> + <path + style="fill:url(#radialGradient2331)" + id="path158" + d="M 143.05,3.719 C 138.426,3.386 134.412,6.865 134.079,11.487 C 133.751,16.112 137.226,20.127 141.849,20.458 C 146.475,20.788 150.487,17.309 150.819,12.687 C 151.147,8.064 147.673,4.049 143.05,3.719 z " /> + </g> + </g> + </g> +<g + id="g3500" + transform="matrix(0.4949083,0,0,0.4925116,271.15933,-90.486856)" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><path + id="path3765" + d="M 336.47828,284.92223 L 316.10957,293.84463 C 313.79541,297.56435 317.4864,305.38612 321.52323,305.77095 L 341.44661,296.32214 L 336.47828,284.92223 z " + style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86199999;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" /><path + sodipodi:nodetypes="ccccc" + id="rect3739" + d="M 182.48082,352.78607 L 325.70203,284.53687 C 335.11753,284.77963 340.43089,296.162 335.50081,304.4038 L 193.86007,375.85734 C 184.57091,375.92987 176.81233,360.89136 182.48082,352.78607 z " + 
style="fill:url(#linearGradient2318);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86199999;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" /><path + style="fill:url(#linearGradient2315);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86199999;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" + d="M 182.48082,352.78607 L 325.70203,284.53687 C 335.11753,284.77963 340.43089,296.162 335.50081,304.4038 L 193.86007,375.85734 C 184.57091,375.92987 176.81233,360.89136 182.48082,352.78607 z " + id="path3747" + sodipodi:nodetypes="ccccc" /><path + sodipodi:nodetypes="cccccsccsc" + id="path3638" + d="M 405.38534,310.4922 L 385.55573,311.59385 C 359.13806,313.18352 338.86444,338.29675 335.844,370.25644 L 355.67361,370.25644 C 357.9199,341.78941 373.72415,318.57598 395.60824,312.14467 C 405.13821,313.78524 414.17576,318.49235 421.77231,325.50211 C 433.25942,336.10188 441.7338,351.90528 444.76914,370.25644 L 483.03509,370.25644 C 461.48308,335.93615 460.67205,334.18497 448.98673,325.77752 C 430.30807,312.33847 421.14164,311.78051 405.38534,310.4922 z " + style="fill:url(#linearGradient2298);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86165088;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" /><g + transform="matrix(4.40658,0,0,4.40658,-838.2272,-849.0049)" + id="g3707"><path + style="fill:url(#radialGradient2038);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86165088;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" + d="M 294.09375,241.71875 C 296.60637,245.96256 292.49091,253.04062 284.90625,257.53125 C 277.32158,262.0219 269.13762,262.2438 266.625,258 L 268.28125,260.75 C 270.79387,264.99378 278.97784,264.80314 286.5625,260.3125 C 294.14717,255.82187 298.26262,248.74379 295.75,244.5 L 294.09375,241.71875 z " + id="path3709" /><path + 
style="fill:url(#linearGradient2040);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86165088;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" + id="path3711" + d="M 294.64549,242.80613 C 297.1581,247.04993 293.04166,254.13875 285.457,258.62938 C 277.87234,263.12002 269.67745,263.32036 267.16483,259.07656 C 264.65221,254.83277 268.76865,247.74394 276.35331,243.2533 C 283.93797,238.76266 292.13287,238.56234 294.64549,242.80613 z " /><path + style="fill:url(#linearGradient2042);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86165088;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" + d="M 286.875,238.65625 C 283.56411,238.73719 279.60483,239.91093 275.8125,242.15625 C 268.22784,246.64689 264.11238,253.7562 266.625,258 L 268.28125,260.75 C 265.76864,256.5062 269.8841,249.42813 277.46875,244.9375 C 285.05341,240.44686 293.23739,240.25621 295.75,244.5 L 294.09375,241.71875 C 292.83744,239.59686 290.18589,238.57531 286.875,238.65625 z " + id="path3713" /><path + style="fill:url(#linearGradient2044);fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.86165088;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1" + d="M 285.45704,244.46552 C 283.31014,244.66573 280.86999,245.46534 278.52687,246.85348 C 274.4655,249.25954 271.83918,252.74383 271.59561,255.63116 C 273.18683,253.30771 275.87134,250.87298 279.25904,248.87625 C 283.22999,246.53575 287.26725,245.35068 290.28521,245.39663 C 289.08728,244.5995 287.39668,244.28464 285.45704,244.46552 z " + id="path3715" /></g><path + d="M 463.65584,227.89473 C 474.72786,246.59538 456.58844,277.83283 423.16603,297.62115 C 389.74362,317.40951 353.63218,318.29233 342.56013,299.59168 C 331.48807,280.89108 349.62748,249.65359 383.04989,229.86522 C 416.4723,210.07686 452.58378,209.19413 463.65584,227.89473 z " + id="path3415" + 
style="fill:none;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:2.91715598;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1" /><path + sodipodi:nodetypes="csssssssc" + id="path3397" + d="M 196.28433,365.04388 C 196.28433,369.98414 194.47942,373.0542 191.89584,375.64607 C 191.24652,376.29748 189.98347,377.13062 189.1265,377.13062 C 188.07149,377.13062 186.83761,375.73513 185.82978,374.97532 C 183.29423,373.06374 181.22611,369.04952 181.22611,364.45226 C 181.22611,359.57809 183.30098,355.29312 185.75709,353.56075 C 186.53834,353.00971 188.12337,352.02504 189.0185,352.02504 C 189.79358,352.02504 190.89912,353.01227 191.43188,353.56769 C 193.51993,355.74455 196.28433,359.96178 196.28433,365.04388 z " + style="fill:#000000;fill-opacity:1;fill-rule:nonzero;stroke:none;stroke-width:0.72248864;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1" + transform="matrix(0.6564768,-0.4924735,0.4446977,0.8993476,-98.158225,129.48989)" /></g><g + id="g3296" + transform="translate(10,0)" + style="fill:url(#linearGradient3732);fill-opacity:1.0" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><text + id="text3190" + y="143.048" + x="270.39694" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="143.048" + x="270.39694" + id="tspan3192" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">N</tspan></text> +<text + id="text3194" + y="142.13394" + x="318.22372" + 
style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="142.13394" + x="318.22372" + id="tspan3196" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">a</tspan></text> +<text + id="text3198" + y="143.048" + x="354.25006" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="143.048" + x="354.25006" + id="tspan3200" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">v</tspan></text> +<text + id="text3202" + y="143.048" + x="392.46439" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="143.048" + x="392.46439" + id="tspan3204" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">i</tspan></text> +<text + id="text3206" + y="144.56596" + x="406.64578" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="144.56596" + x="406.64578" + id="tspan3208" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">g</tspan></text> +<text + id="text3210" + y="142.13394" + x="447.80374" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + 
xml:space="preserve"><tspan + y="142.13394" + x="447.80374" + id="tspan3212" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">a</tspan></text> +<text + id="text3214" + y="142.13394" + x="486.03033" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="142.13394" + x="486.03033" + id="tspan3216" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">t</tspan></text> +<text + id="text3218" + y="142.13394" + x="505.88095" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="142.13394" + x="505.88095" + id="tspan3220" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">o</tspan></text> +<text + id="text3222" + y="143.048" + x="547.52039" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient3732);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + xml:space="preserve"><tspan + y="143.048" + x="547.52039" + id="tspan3224" + sodipodi:role="line" + style="fill-opacity:1.0;fill:url(#linearGradient3732)">r</tspan></text> +</g><g + id="g5" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"> + <g + id="g7"> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path9" + d="M 3.034,86.053 L 18.381,86.053 C 35.577,86.053 50.764,91.838 50.764,114.097 C 50.764,136.356 35.577,142.14 18.381,142.14 L 3.034,142.14 L 3.034,86.053 z M 14.281,133.301 L 20.551,133.301 C 29.952,133.301 
39.033,126.31 39.033,114.097 C 39.033,101.881 29.952,94.893 20.551,94.893 L 14.281,94.893 L 14.281,133.301 z " /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path11" + d="M 51.404,86.053 L 71.012,86.053 C 78.243,86.053 89.09,88.141 89.09,100.436 C 89.09,107.104 84.59,111.766 77.92,112.973 L 77.92,113.133 C 85.555,113.855 90.778,119 90.778,125.908 C 90.778,140.052 78.081,142.139 71.011,142.139 L 51.404,142.139 L 51.404,86.053 z M 62.654,108.715 L 67.235,108.715 C 71.974,108.715 77.841,107.667 77.841,101.881 C 77.841,95.375 72.136,94.893 66.994,94.893 L 62.653,94.893 L 62.653,108.715 L 62.654,108.715 z M 62.654,133.301 L 67.958,133.301 C 73.181,133.301 79.53,132.017 79.53,125.506 C 79.53,118.194 73.582,117.069 67.958,117.069 L 62.654,117.069 L 62.654,133.301 z " /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path13" + d="M 114.804,99.794 C 108.455,99.794 104.358,102.606 101.948,106.623 L 101.786,106.623 L 101.786,100.757 L 91.501,100.757 L 91.501,159.014 L 100.539,159.014 C 101.267,158.612 101.867,157.973 102.269,157.027 L 102.269,137.399 L 102.428,137.399 C 106.205,141.979 109.981,143.106 114.563,143.106 C 127.499,143.106 132.16,132.661 132.16,121.329 C 132.16,109.916 127.499,99.794 114.804,99.794 z M 111.831,134.748 C 104.679,134.748 102.347,127.194 102.347,121.328 C 102.347,115.543 105.081,108.15 111.991,108.15 C 118.981,108.15 120.91,115.785 120.91,121.328 C 120.91,127.035 118.901,134.748 111.831,134.748 z " /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path15" + d="M 100.54,159.014 L 102.27,159.014 L 102.27,157.027 C 101.867,157.973 101.268,158.611 100.54,159.014 z " /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path17" + d="M 165.748,139.811 C 161.81,141.979 157.31,143.106 151.523,143.106 C 137.865,143.106 129.91,135.23 129.91,121.651 C 129.91,109.678 136.258,99.795 149.114,99.795 C 164.461,99.795 168.801,110.321 168.801,124.786 L 
140.195,124.786 C 140.678,131.452 145.339,135.23 152.008,135.23 C 157.229,135.23 161.73,133.301 165.748,131.053 L 165.748,139.811 L 165.748,139.811 z M 158.517,117.393 C 158.194,112.17 155.785,107.667 149.757,107.667 C 143.731,107.667 140.678,111.846 140.195,117.393 L 158.517,117.393 z " /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path19" + d="M 205.762,81.874 L 196.443,81.874 L 196.443,105.579 L 196.283,105.579 C 193.068,101.641 189.293,99.794 183.909,99.794 C 171.211,99.794 166.551,109.916 166.551,121.328 C 166.551,132.66 171.211,143.105 183.909,143.105 C 188.97,143.105 193.391,141.498 196.604,137.478 L 196.765,137.478 L 196.765,142.139 L 207.211,142.139 L 207.211,83.52 C 206.846,82.777 206.352,82.239 205.762,81.874 z M 186.88,134.748 C 179.811,134.748 177.8,127.035 177.8,121.328 C 177.8,115.785 179.73,108.15 186.721,108.15 C 193.631,108.15 196.364,115.543 196.364,121.328 C 196.363,127.194 194.03,134.748 186.88,134.748 z " /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path21" + d="M 205.762,81.874 C 206.352,82.239 206.846,82.777 207.211,83.52 L 207.211,81.874 L 205.762,81.874 z " /> + <rect + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="rect23" + height="41.382999" + width="10.768" + y="100.757" + x="210.505" /> + <rect + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="rect25" + height="10.287" + width="10.768" + y="82.917999" + x="210.505" /> + <path + style="fill:#ffffff;stroke:#ffffff;stroke-width:6.06710005" + id="path27" + d="M 226.415,102.926 C 230.591,100.999 236.217,99.794 240.797,99.794 C 253.412,99.794 258.638,105.016 258.638,117.231 L 258.638,122.536 C 258.638,126.714 258.719,129.848 258.796,132.902 C 258.878,136.034 259.038,138.925 259.28,142.14 L 249.798,142.14 C 249.396,139.971 249.396,137.241 249.315,135.955 L 249.156,135.955 C 246.666,140.535 241.281,143.105 236.378,143.105 C 229.066,143.105 221.915,138.686 221.915,130.812 C 221.915,124.624 
224.888,121.007 228.986,119 C 233.086,116.99 238.388,116.589 242.888,116.589 L 248.834,116.589 C 248.834,109.916 245.861,107.667 239.513,107.667 C 234.933,107.667 230.351,109.438 226.737,112.17 L 226.415,102.926 z M 239.03,135.229 C 242.325,135.229 244.894,133.783 246.583,131.536 C 248.35,129.205 248.834,126.228 248.834,123.016 L 244.173,123.016 C 239.352,123.016 232.201,123.82 232.201,130.167 C 232.201,133.701 235.173,135.229 239.03,135.229 z " /> + </g> + </g><text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4751);fill-opacity:1.0;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="-2.4250796" + y="143.048" + id="text3268" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3270" + x="-2.4250796" + y="143.048" + style="fill-opacity:1.0;fill:url(#linearGradient4751)">D</tspan></text> +<text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4761);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="45.214111" + y="143.048" + id="text3272" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3274" + x="45.214111" + y="143.048" + style="fill:url(#linearGradient4761)">B</tspan></text> +<text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4763);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="94.297058" + y="144.06128" + id="text3276" + 
inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3278" + x="94.297058" + y="144.06128" + style="fill:url(#linearGradient4763)">p</tspan></text> +<text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4765);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="138.39842" + y="142.13394" + id="text3280" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3282" + x="138.39842" + y="142.13394" + style="fill:url(#linearGradient4765)">e</tspan></text> +<text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4767);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="176.45079" + y="142.73395" + id="text3284" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3286" + x="176.45079" + y="142.73395" + style="fill:url(#linearGradient4767)">d</tspan></text> +<text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4769);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="215.60837" + y="143.048" + id="text3288" + inkscape:export-filename="/home/jl/promotion/dl-learner-svn/trunk/resources/logos/text3292.png" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3290" + x="215.60837" + y="143.048" + 
style="fill:url(#linearGradient4769)">i</tspan></text> +<text + xml:space="preserve" + style="font-size:78px;font-style:normal;font-weight:bold;fill:url(#linearGradient4771);fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Arial" + x="229.86473" + y="142.13394" + id="text3292" + inkscape:export-xdpi="67.5" + inkscape:export-ydpi="67.5"><tspan + sodipodi:role="line" + id="tspan3294" + x="229.86473" + y="142.13394" + style="fill:url(#linearGradient4771)">a</tspan></text> +</svg> \ No newline at end of file Added: trunk/src/dbpedia-navigator/images/dbpedia_navigator.png =================================================================== (Binary files differ) Property changes on: trunk/src/dbpedia-navigator/images/dbpedia_navigator.png ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Modified: trunk/src/dbpedia-navigator/index.php =================================================================== --- trunk/src/dbpedia-navigator/index.php 2008-01-21 02:10:46 UTC (rev 405) +++ trunk/src/dbpedia-navigator/index.php 2008-01-21 18:29:46 UTC (rev 406) @@ -69,7 +69,8 @@ </head> <body> -<h1>DBpedia Navigator</h1> +<!-- <h1>DBpedia Navigator</h1> --> +<img src="images/dbpedia_navigator.png" alt="DBpedia Navigator" style="padding:5px" /> <div id="layer" style="display:none"> <div id="layerContent" style="display:none"></div> </div> @@ -241,7 +242,7 @@ tabs etc.) as an overlay, because the Wikipedia article will almost always be a human-friendlier description of an object compared to the extracted one.</li> <li>Handle redirect for example if you look for 'Deutschland'</li> - <li>Ich habe mir gestern deswegen mal angeschaut welche fertigen SPARQL-APIs es gibt. Eine der verbreitesten und deshalb auch aktiven Projekt ist Jena. F\xFCr uns relevant ist das ARQ-Teilprojekt: http://jena.sourceforge.net/ARQ/. 
+ <li>Ich habe mir gestern deswegen mal angeschaut welche fertigen SPARQL-APIs es gibt. Eine der verbreitesten und deshalb auch aktiven Projekt ist Jena. Für uns relevant ist das ARQ-Teilprojekt: http://jena.sourceforge.net/ARQ/. </li> </ul> </div> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-01-23 12:05:50
|
Revision: 418 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=418&view=rev Author: jenslehmann Date: 2008-01-23 04:05:44 -0800 (Wed, 23 Jan 2008) Log Message: ----------- Started new learning algorithm implementation, based on the refinement operator approach, to test various ideas for improvements. In particular, the goal will be to use more knowledge about the specific examples in the algorithm run. [algorithm is not working yet] Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/cli/Start.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedHeuristic.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/NodeComparatorStable.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/package.html Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-01-23 11:15:31 UTC (rev 417) +++ trunk/lib/components.ini 2008-01-23 12:05:44 UTC (rev 418) @@ -17,4 +17,4 @@ org.dllearner.algorithms.RandomGuesser org.dllearner.algorithms.BruteForceLearner org.dllearner.algorithms.refinement.ROLearner -org.dllearner.algorithms.gp.GP +org.dllearner.algorithms.refinement2.ExampleBasedROLearnerComponent \ No newline at end of file Added: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedHeuristic.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedHeuristic.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedHeuristic.java 2008-01-23 12:05:44 UTC (rev 418) @@ -0,0 +1,35 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ +package org.dllearner.algorithms.refexamples; + +import java.util.Comparator; + +/** + * Marker interface for heuristics in the refinement operator + * based learning approach. A heuristic implements a method + * to decide which one of two given nodes seems to be more + * promising with respect to the learning problem we consider. + * + * @author Jens Lehmann + * + */ +public interface ExampleBasedHeuristic extends Comparator<ExampleBasedNode>{ + +} Added: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java 2008-01-23 12:05:44 UTC (rev 418) @@ -0,0 +1,194 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. 
+ * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ + +package org.dllearner.algorithms.refexamples; + +import java.util.Set; +import java.util.TreeSet; + +import org.dllearner.core.dl.Concept; +import org.dllearner.core.dl.Individual; +import org.dllearner.utilities.ConceptComparator; + +/** + * + * Represents a node in the search tree. A node consists of + * the following parts: + * + * ... (see paper) ... 
+ * + * @author Jens Lehmann + * + */ +public class ExampleBasedNode { + + // TODO: add example based variables here + @SuppressWarnings({"unused"}) + private Set<Individual> coveredPositives; + + // TOP ist einfach das TOP-Konzept, also das einzige welches nicht evaluiert wird + public enum QualityEvaluationMethod { TOP, REASONER, TOO_WEAK_LIST, OVERLY_GENERAL_LIST }; + + private QualityEvaluationMethod qualityEvaluationMethod = QualityEvaluationMethod.TOP; + + // alle Eigenschaften eines Knotens im Suchbaum + private Concept concept; + private int horizontalExpansion; + private int coveredNegativeExamples; + private boolean isTooWeak; + private boolean isQualityEvaluated; + private boolean isRedundant; + + private static ConceptComparator conceptComparator = new ConceptComparator(); + private static NodeComparatorStable nodeComparator = new NodeComparatorStable(); + + // Einbettung in Suchbaum + private ExampleBasedNode parent = null; + // private Set<Node> children = new HashSet<Node>(); + private Set<ExampleBasedNode> children = new TreeSet<ExampleBasedNode>(nodeComparator); + // es wird auch eine Liste von Kindern gehalten + private Set<Concept> childConcepts = new TreeSet<Concept>(conceptComparator); + + // verwendeter Operator für Expansion des Knotens + // private RefinementOperator operator; + + public ExampleBasedNode(Concept concept) { + this.concept = concept; + horizontalExpansion = 0; + isQualityEvaluated = false; + } + + public void setCoveredNegativeExamples(int coveredNegativeExamples) { + if(isQualityEvaluated) + throw new RuntimeException("Cannot set quality of a node more than once."); + this.coveredNegativeExamples = coveredNegativeExamples; + isQualityEvaluated = true; + } + + public void setHorizontalExpansion(int horizontalExpansion) { + this.horizontalExpansion = horizontalExpansion; + } + + public void setRedundant(boolean isRedundant) { + this.isRedundant = isRedundant; + } + + public void setTooWeak(boolean isTooWeak) { + 
if(isQualityEvaluated) + throw new RuntimeException("Cannot set quality of a node more than once."); + this.isTooWeak = isTooWeak; + isQualityEvaluated = true; + } + + public boolean addChild(ExampleBasedNode child) { + // child.setParent(this); + child.parent = this; + childConcepts.add(child.concept); + return children.add(child); + } + + public Concept getConcept() { + return concept; + } + public int getCoveredNegativeExamples() { + return coveredNegativeExamples; + } + public int getHorizontalExpansion() { + return horizontalExpansion; + } + public boolean isQualityEvaluated() { + return isQualityEvaluated; + } + public boolean isRedundant() { + return isRedundant; + } + public boolean isTooWeak() { + return isTooWeak; + } + + @Override + public String toString() { + String ret = concept.toString() + " [q:"; + if(isTooWeak) + ret += "tw"; + else + ret += coveredNegativeExamples; + ret += ", he:" + horizontalExpansion + ", children:" + children.size() + "]"; + return ret; + } + + // gibt die Refinement-Chain zurück, die zu dem Knoten geführt hat + public String getRefinementChainString() { + if(parent!=null) { + String ret = parent.getRefinementChainString(); + ret += " => " + concept.toString(); + return ret; + } else { + return concept.toString(); + } + } + + public String getTreeString() { + return getTreeString(0).toString(); + } + + private StringBuilder getTreeString(int depth) { + StringBuilder treeString = new StringBuilder(); + for(int i=0; i<depth-1; i++) + treeString.append(" "); + if(depth!=0) + // treeString.append("|-→ "); + treeString.append("|--> "); + treeString.append(getShortDescription()+"\n"); + for(ExampleBasedNode child : children) { + treeString.append(child.getTreeString(depth+1)); + } + return treeString; + } + + private String getShortDescription() { + String ret = concept.toString() + " [q:"; + + if(isTooWeak) + ret += "tw"; + else + ret += coveredNegativeExamples; + + ret += " ("+qualityEvaluationMethod+"), he:" + 
horizontalExpansion + "]"; + return ret; + } + + public Set<ExampleBasedNode> getChildren() { + return children; + } + + public Set<Concept> getChildConcepts() { + return childConcepts; + } + + public QualityEvaluationMethod getQualityEvaluationMethod() { + return qualityEvaluationMethod; + } + + public void setQualityEvaluationMethod(QualityEvaluationMethod qualityEvaluationMethod) { + this.qualityEvaluationMethod = qualityEvaluationMethod; + } + +} Added: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java 2008-01-23 12:05:44 UTC (rev 418) @@ -0,0 +1,325 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ + +package org.dllearner.algorithms.refexamples; + +import java.io.File; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.dllearner.algorithms.refinement.RhoDown; +import org.dllearner.core.LearningAlgorithm; +import org.dllearner.core.LearningProblem; +import org.dllearner.core.ReasoningService; +import org.dllearner.core.Score; +import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.CommonConfigMappings; +import org.dllearner.core.config.CommonConfigOptions; +import org.dllearner.core.config.ConfigEntry; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.config.DoubleConfigOption; +import org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.core.dl.AtomicConcept; +import org.dllearner.core.dl.AtomicRole; +import org.dllearner.core.dl.Concept; +import org.dllearner.learningproblems.PosNegLP; +import org.dllearner.learningproblems.PosOnlyDefinitionLP; +import org.dllearner.utilities.Files; +import org.dllearner.utilities.Helper; + +/** + * The DL-Learner learning algorithm component for the example + * based refinement operator approach. It handles all + * configuration options, creates the corresponding objects and + * passes them to the actual refinement operator, heuristic, and + * learning algorithm implementations. + * + * Note: The component is not working yet. + * + * Note: The options supported by the ROLearner component and this + * one are not equal. Options that have been dropped for now: + * - horizontal expansion factor: The goal of the algorithm will + * be to (hopefully) be able to learn long and complex concepts + * more efficiently. 
+ * A horizontal expansion factor has its benefits, but limits + * the length of concepts learnable in reasonable time to + * about 15 with its default value of 0.6 and a small sized + * background knowledge base. We hope to get more fine-grained + * control of whether it makes sense to extend a node with + * more sophisticated heuristics. + * Dropping the horizontal expansion factor means that the + * completeness of the algorithm depends on the heuristic. + * + * @author Jens Lehmann + * + */ +public class ExampleBasedROComponent extends LearningAlgorithm { + + // actual algorithm + private ExampleBasedROLearner algorithm; + + // learning problem to solve and background knowledge + private ReasoningService rs; + private LearningProblem learningProblem; + + // configuration options + private boolean writeSearchTree; + private File searchTreeFile; + private boolean replaceSearchTree = false; + private static String defaultSearchTreeFile = "log/searchTree.txt"; + private String heuristic = "lexicographic"; + Set<AtomicConcept> allowedConcepts; + Set<AtomicRole> allowedRoles; + Set<AtomicConcept> ignoredConcepts; + Set<AtomicRole> ignoredRoles; + // these are computed as the result of the previous four settings + Set<AtomicConcept> usedConcepts; + Set<AtomicRole> usedRoles; + private boolean applyAllFilter = true; + private boolean applyExistsFilter = true; + private boolean useTooWeakList = true; + private boolean useOverlyGeneralList = true; + private boolean useShortConceptConstruction = true; + private boolean improveSubsumptionHierarchy = true; + private boolean useAllConstructor = true; + private boolean useExistsConstructor = true; + private boolean useNegation = true; + + // Variablen zur Einstellung der Protokollierung + // boolean quiet = false; + boolean showBenchmarkInformation = false; + // boolean createTreeString = false; + // String searchTree = new String(); + + // Konfiguration des Algorithmus + // Faktor für horizontale Erweiterung (notwendig für 
completeness) + // double horizontalExpansionFactor = 0.6; + + // soll später einen Operator und eine Heuristik entgegennehmen + // public ROLearner(LearningProblem learningProblem, LearningProblem learningProblem2) { + public ExampleBasedROComponent(PosNegLP learningProblem, ReasoningService rs) { + this.learningProblem = learningProblem; + this.rs = rs; + } + + public ExampleBasedROComponent(PosOnlyDefinitionLP learningProblem, ReasoningService rs) { + this.learningProblem = learningProblem; + this.rs = rs; + } + + public static Collection<Class<? extends LearningProblem>> supportedLearningProblems() { + Collection<Class<? extends LearningProblem>> problems = new LinkedList<Class<? extends LearningProblem>>(); + problems.add(PosNegLP.class); + problems.add(PosOnlyDefinitionLP.class); + return problems; + } + + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + options.add(new BooleanConfigOption("writeSearchTree", "specifies whether to write a search tree", false)); + options.add(new StringConfigOption("searchTreeFile","file to use for the search tree", defaultSearchTreeFile)); + options.add(new BooleanConfigOption("replaceSearchTree","specifies whether to replace the search tree in the log file after each run or append the new search tree", false)); + StringConfigOption heuristicOption = new StringConfigOption("heuristic", "specifiy the heuristic to use", "lexicographic"); + heuristicOption.setAllowedValues(new String[] {"lexicographic", "flexible"}); + options.add(heuristicOption); + options.add(new BooleanConfigOption("applyAllFilter", "usage of equivalence ALL R.C AND ALL R.D = ALL R.(C AND D)", true)); + options.add(new BooleanConfigOption("applyExistsFilter", "usage of equivalence EXISTS R.C OR EXISTS R.D = EXISTS R.(C OR D)", true)); + options.add(new BooleanConfigOption("useTooWeakList", "try to filter out too weak concepts without sending them to the reasoner", 
true)); + options.add(new BooleanConfigOption("useOverlyGeneralList", "try to find overly general concept without sending them to the reasoner", true)); + options.add(new BooleanConfigOption("useShortConceptConstruction", "shorten concept to see whether they already exist", true)); + DoubleConfigOption horizExp = new DoubleConfigOption("horizontalExpansionFactor", "horizontal expansion factor (see publication for description)", 0.6); + horizExp.setLowerLimit(0.0); + horizExp.setUpperLimit(1.0); + options.add(horizExp); + options.add(new BooleanConfigOption("improveSubsumptionHierarchy", "simplify subsumption hierarchy to reduce search space (see publication for description)", true)); + // allowed/ignored concepts/roles could also be a reasoner option (?) + options.add(CommonConfigOptions.allowedConcepts()); + options.add(CommonConfigOptions.ignoredConcepts()); + options.add(CommonConfigOptions.allowedRoles()); + options.add(CommonConfigOptions.ignoredRoles()); + options.add(CommonConfigOptions.useAllConstructor()); + options.add(CommonConfigOptions.useExistsConstructor()); + options.add(CommonConfigOptions.useNegation()); + return options; + } + + /* (non-Javadoc) + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + @SuppressWarnings({"unchecked"}) + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + String name = entry.getOptionName(); + if(name.equals("writeSearchTree")) + writeSearchTree = (Boolean) entry.getValue(); + else if(name.equals("searchTreeFile")) + searchTreeFile = new File((String)entry.getValue()); + else if(name.equals("replaceSearchTree")) + replaceSearchTree = (Boolean) entry.getValue(); + else if(name.equals("heuristic")) { + String value = (String) entry.getValue(); + if(value.equals("lexicographic")) + heuristic = "lexicographic"; + else + heuristic = "flexible"; + } else if(name.equals("allowedConcepts")) { + allowedConcepts = 
CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); + } else if(name.equals("allowedRoles")) { + allowedRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); + } else if(name.equals("ignoredConcepts")) { + ignoredConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); + } else if(name.equals("ignoredRoles")) { + ignoredRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); + } else if(name.equals("applyAllFilter")) { + applyAllFilter = (Boolean) entry.getValue(); + } else if(name.equals("applyExistsFilter")) { + applyExistsFilter = (Boolean) entry.getValue(); + } else if(name.equals("useTooWeakList")) { + useTooWeakList = (Boolean) entry.getValue(); + } else if(name.equals("useOverlyGeneralList")) { + useOverlyGeneralList = (Boolean) entry.getValue(); + } else if(name.equals("useShortConceptConstruction")) { + useShortConceptConstruction = (Boolean) entry.getValue(); + } else if(name.equals("improveSubsumptionHierarchy")) { + improveSubsumptionHierarchy = (Boolean) entry.getValue(); + } else if(name.equals("useAllConstructor")) { + useAllConstructor = (Boolean) entry.getValue(); + } else if(name.equals("useExistsConstructor")) { + useExistsConstructor = (Boolean) entry.getValue(); + } else if(name.equals("useNegation")) { + useNegation = (Boolean) entry.getValue(); + } + + } + + /* (non-Javadoc) + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() { + if(searchTreeFile == null) + searchTreeFile = new File(defaultSearchTreeFile); + + if(writeSearchTree) + Files.clearFile(searchTreeFile); + + // adjust heuristic + ExampleBasedHeuristic algHeuristic; + + if(heuristic == "lexicographic") + algHeuristic = new LexicographicHeuristic(); + else { + if(learningProblem instanceof PosOnlyDefinitionLP) { + throw new RuntimeException("does not work with positive examples only yet"); + } + algHeuristic = null; + // algHeuristic = new 
FlexibleHeuristic(learningProblem.getNegativeExamples().size(), learningProblem.getPercentPerLengthUnit()); + } + + // compute used concepts/roles from allowed/ignored + // concepts/roles + if(allowedConcepts != null) { + // sanity check to control if no non-existing concepts are in the list + Helper.checkConcepts(rs, allowedConcepts); + usedConcepts = allowedConcepts; + } else if(ignoredConcepts != null) { + usedConcepts = Helper.computeConceptsUsingIgnoreList(rs, ignoredConcepts); + } else { + usedConcepts = Helper.computeConcepts(rs); + } + + if(allowedRoles != null) { + Helper.checkRoles(rs, allowedRoles); + usedRoles = allowedRoles; + } else if(ignoredRoles != null) { + Helper.checkRoles(rs, ignoredRoles); + usedRoles = Helper.difference(rs.getAtomicRoles(), ignoredRoles); + } else { + usedRoles = rs.getAtomicRoles(); + } + + // prepare subsumption and role hierarchies, because they are needed + // during the run of the algorithm + rs.prepareSubsumptionHierarchy(usedConcepts); + if(improveSubsumptionHierarchy) + rs.getSubsumptionHierarchy().improveSubsumptionHierarchy(); + rs.prepareRoleHierarchy(usedRoles); + + // create a refinement operator and pass all configuration + // variables to it + RhoDown operator = new RhoDown( + rs, + applyAllFilter, + applyExistsFilter, + useAllConstructor, + useExistsConstructor, + useNegation + ); + + // create an algorithm object and pass all configuration + // options to it + algorithm = new ExampleBasedROLearner( + learningProblem, + operator, + algHeuristic, + usedConcepts, + usedRoles, + writeSearchTree, + replaceSearchTree, + searchTreeFile, + useTooWeakList, + useOverlyGeneralList, + useShortConceptConstruction + ); + } + + public static String getName() { + return "example driven refinement operator based learning algorithm [not working]"; + } + + @Override + public void start() { + algorithm.start(); + } + + @Override + public Score getSolutionScore() { + return algorithm.getSolutionScore(); + } + + @Override + public 
Concept getBestSolution() { + return algorithm.getBestSolution(); + } + + @Override + public synchronized List<Concept> getBestSolutions(int nrOfSolutions) { + return algorithm.getBestSolutions(nrOfSolutions); + } + + @Override + public void stop() { + algorithm.stop(); + } + +} Added: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-01-23 12:05:44 UTC (rev 418) @@ -0,0 +1,839 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ * + */ +package org.dllearner.algorithms.refexamples; + +import java.io.File; +import java.text.DecimalFormat; +import java.util.Comparator; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; + +import org.dllearner.algorithms.refinement.RefinementOperator; +import org.dllearner.algorithms.refinement.RhoDown; +import org.dllearner.core.LearningProblem; +import org.dllearner.core.ReasoningService; +import org.dllearner.core.Score; +import org.dllearner.core.dl.AtomicConcept; +import org.dllearner.core.dl.AtomicRole; +import org.dllearner.core.dl.Concept; +import org.dllearner.core.dl.MultiConjunction; +import org.dllearner.core.dl.MultiDisjunction; +import org.dllearner.core.dl.Top; +import org.dllearner.learningproblems.PosNegLP; +import org.dllearner.learningproblems.PosOnlyDefinitionLP; +import org.dllearner.utilities.ConceptComparator; +import org.dllearner.utilities.ConceptTransformation; +import org.dllearner.utilities.Files; +import org.dllearner.utilities.Helper; + +/** + * Implements the example based refinement operator learning + * approach. 
+ * + * @author Jens Lehmann + * + */ +public class ExampleBasedROLearner { + + // all configuration options, which were passed to the + // learning algorithms + private boolean writeSearchTree; + private File searchTreeFile; + private boolean replaceSearchTree = false; + Set<AtomicConcept> allowedConcepts; + Set<AtomicRole> allowedRoles; + Set<AtomicConcept> ignoredConcepts; + Set<AtomicRole> ignoredRoles; + // these are computed as the result of the previous four settings + Set<AtomicConcept> usedConcepts; + Set<AtomicRole> usedRoles; + + private boolean useTooWeakList = true; + private boolean useOverlyGeneralList = true; + private boolean useShortConceptConstruction = true; + private double horizontalExpansionFactor = 0.6; + + + private boolean quiet = false; + + private boolean stop = false; + + private ReasoningService rs; + + private PosNegLP learningProblem; + private PosOnlyDefinitionLP posOnlyLearningProblem; + private boolean posOnly = false; + + // non-configuration variables + + // Lösungen protokollieren + boolean solutionFound = false; + List<Concept> solutions = new LinkedList<Concept>(); + + // verwendeter Refinement-Operator (momentan werden für Statistik RhoDown-spezifische + // Sachen abgefragt) + // RefinementOperator operator; + RhoDown operator; + + // Variablen zur Einstellung der Protokollierung + // boolean quiet = false; + boolean showBenchmarkInformation = false; + // boolean createTreeString = false; + // String searchTree = new String(); + + // Konfiguration des Algorithmus + // Faktor für horizontale Erweiterung (notwendig für completeness) + // double horizontalExpansionFactor = 0.6; + + + + private Comparator<ExampleBasedNode> nodeComparator; + private NodeComparatorStable nodeComparatorStable = new NodeComparatorStable(); + private ConceptComparator conceptComparator = new ConceptComparator(); + DecimalFormat df = new DecimalFormat(); + + // Menge von Kandidaten für Refinement + // (wird für Direktzugriff auf Baumknoten verwendet) 
+ private TreeSet<ExampleBasedNode> candidates; + // während eines Durchlaufs neu gefundene Knoten + private List<ExampleBasedNode> newCandidates = new LinkedList<ExampleBasedNode>(); + // stabiles candidate set, da sich die Knoten nach dem einfügen nicht + // verschieben können => das Set enthält nicht die aktuellen horizontal + // expansions, es dient nur dazu die besten Konzepte zu speichern; hat also + // keine Funktion im Algorithmus + private TreeSet<ExampleBasedNode> candidatesStable = new TreeSet<ExampleBasedNode>(nodeComparatorStable); + // vorhandene Konzepte, die irgendwann als proper eingestuft worden + private SortedSet<Concept> properRefinements = new TreeSet<Concept>(conceptComparator); + // speichert Konzept und deren Evaluierung, um sie leicht wiederzufinden für + // Strategien wie Konzeptverkürzung etc. + // Zahl = covered negatives, -1 = too weak + // private Map<Concept, Integer> evaluationCache = new TreeMap<Concept, Integer>(conceptComparator); + // Blacklists + private SortedSet<Concept> tooWeakList = new TreeSet<Concept>(conceptComparator); + private SortedSet<Concept> overlyGeneralList = new TreeSet<Concept>(conceptComparator); + + TreeSet<ExampleBasedNode> expandedNodes = new TreeSet<ExampleBasedNode>(nodeComparatorStable); + + // statistische Variablen + private int maxRecDepth = 0; + private int maxNrOfRefinements = 0; + private int maxNrOfChildren = 0; + private int redundantConcepts = 0; + int maximumHorizontalExpansion; + int minimumHorizontalExpansion; + // private int propernessTests = 0; + private int propernessTestsReasoner = 0; + private int propernessTestsAvoidedByShortConceptConstruction = 0; + private int propernessTestsAvoidedByTooWeakList = 0; + private int conceptTestsTooWeakList = 0; + private int conceptTestsOverlyGeneralList = 0; + private int conceptTestsReasoner = 0; + + // Zeitvariablen + private long algorithmStartTime; + private long propernessCalcTimeNs = 0; + private long propernessCalcReasoningTimeNs = 0; + 
private long childConceptsDeletionTimeNs = 0; + private long refinementCalcTimeNs = 0; + private long redundancyCheckTimeNs = 0; + private long evaluateSetCreationTimeNs = 0; + private long improperConceptsRemovalTimeNs = 0; + long someTimeNs = 0; + int someCount = 0; + + public ExampleBasedROLearner( + LearningProblem learningProblem, + RefinementOperator operator, + ExampleBasedHeuristic heuristic, + Set<AtomicConcept> allowedConcepts, + Set<AtomicRole> allowedRoles, + boolean writeSearchTree, + boolean replaceSearchTree, + File searchTreeFile, + boolean useTooWeakList, + boolean useOverlyGeneralList, + boolean useShortConceptConstruction + ) { + if(learningProblem instanceof PosNegLP) { + this.learningProblem = (PosNegLP) learningProblem; + posOnly = false; + } else if(learningProblem instanceof PosOnlyDefinitionLP) { + this.posOnlyLearningProblem = (PosOnlyDefinitionLP) learningProblem; + posOnly = true; + } + + // candidate sets entsprechend der gewählten Heuristik initialisieren + candidates = new TreeSet<ExampleBasedNode>(nodeComparator); + // newCandidates = new TreeSet<Node>(nodeComparator); + } + + public void start() { + // Suche wird mit Top-Konzept gestartet + Top top = new Top(); + ExampleBasedNode topNode = new ExampleBasedNode(top); + // int coveredNegativeExamples = learningProblem.coveredNegativeExamplesOrTooWeak(top); + // aus Top folgen immer alle negativen Beispiele, d.h. 
es ist nur eine Lösung, wenn + // es keine negativen Beispiele gibt + int coveredNegativeExamples = getNumberOfNegatives(); + topNode.setCoveredNegativeExamples(coveredNegativeExamples); + // topNode.setHorizontalExpansion(1); // die 0 ist eigentlich richtig, da keine Refinements + // der Länge 1 untersucht wurden + candidates.add(topNode); + candidatesStable.add(topNode); + // Abbruchvariable => beachten, dass bereits TOP eine Lösung sein kann + solutionFound = (coveredNegativeExamples == 0); + solutions = new LinkedList<Concept>(); + if(solutionFound) + solutions.add(top); + + int loop = 0; + + // Voreinstellung für horizontal expansion + maximumHorizontalExpansion = 0; + minimumHorizontalExpansion = 0; + + algorithmStartTime = System.nanoTime(); + + // TODO: effizienter Traversal der Subsumption-Hierarchie + // TODO: Äquivalenzen nutzen + // TODO: Gibt es auch eine andere Abbruchbedingung? Es könnte sein, dass irgendwann keine + // proper refinements mehr gefunden werden, aber wie stelle man das fest? 
+ while(!solutionFound && !stop) { + + if(!quiet) + printStatistics(false); + + // besten Knoten nach Heuristik auswählen + ExampleBasedNode bestNode = candidates.last(); + // besten Knoten erweitern + // newCandidates = new TreeSet<Node>(nodeComparator); + newCandidates.clear(); + candidates.remove(bestNode); + extendNodeProper(bestNode, bestNode.getHorizontalExpansion()+1); + candidates.add(bestNode); + candidates.addAll(newCandidates); + candidatesStable.addAll(newCandidates); + + // minimum horizontal expansion berechnen + if(bestNode.getHorizontalExpansion()>maximumHorizontalExpansion) + maximumHorizontalExpansion = bestNode.getHorizontalExpansion(); + minimumHorizontalExpansion = (int) Math.floor(horizontalExpansionFactor*maximumHorizontalExpansion); + + // neu: es werden solange Knoten erweitert bis wirklich jeder Knoten die + // notwendige minimum horizontal expansion hat + boolean nodesExpanded; + do { + nodesExpanded = false; + + + // es darf nicht candidatesStable geklont werden, da diese Menge nicht + // aktualisiert wird, also die falschen horizontal expansions vorliegen + // TODO: bei Tests war die Performance der clone-Operation ganz gut, aber + // es skaliert natürlich nicht so gut mit größer werdenden candidate set + // => Lösung ist vielleicht einfach einen Iterator zu verwenden und das + // aktuelle Konzept gleich hier zu löschen (wird dann bei expansion wieder + // hinzugefügt) + // TreeSet<Node> candidatesClone = (TreeSet<Node>) candidates.clone(); + newCandidates.clear(); + + + // for(Node candidate : candidatesClone) { + Iterator<ExampleBasedNode> it = candidates.iterator(); + List<ExampleBasedNode> changedNodes = new LinkedList<ExampleBasedNode>(); + while(it.hasNext()){ + ExampleBasedNode candidate = it.next(); + // alle Kandidaten, die nicht too weak sind und unter minimumHorizontalExpansion + // liegen, werden erweitert + if(!candidate.isTooWeak() && candidate.getHorizontalExpansion()<minimumHorizontalExpansion) { + // Vorsicht, falls 
candidates irgendwann in extendProper benutzt + // werden sollten! Es könnten auf diese Weise Knoten fehlen + // (momentan wird candidates nur zur Auswahl des besten Knotens + // benutzt). + it.remove(); + + extendNodeProper(candidate, minimumHorizontalExpansion); + nodesExpanded = true; + + changedNodes.add(candidate); + } + } + + long someTimeStart = System.nanoTime(); + someCount++; + // geänderte temporär entfernte Knoten wieder hinzufügen + candidates.addAll(changedNodes); + // neu gefundene Knoten hinzufügen + candidates.addAll(newCandidates); + candidatesStable.addAll(newCandidates); + someTimeNs += System.nanoTime() - someTimeStart; + + } while(nodesExpanded && !stop); + + //System.out.println("candidate set:"); + //for(Node n : candidates) { + // System.out.println(n); + //} + + if(writeSearchTree) { + // String treeString = ""; + String treeString = "best expanded node: " + bestNode+ "\n"; + if(expandedNodes.size()>1) { + treeString += "all expanded nodes:\n"; // due to minimum horizontal expansion:\n"; + for(ExampleBasedNode n : expandedNodes) { + treeString += " " + n + "\n"; + } + } + expandedNodes.clear(); + treeString += "horizontal expansion: " + minimumHorizontalExpansion + " to " + maximumHorizontalExpansion + "\n"; + treeString += topNode.getTreeString(); + treeString += "\n"; + // System.out.println(treeString); + // searchTree += treeString + "\n"; + // TODO: ev. 
immer nur einen search tree speichern und den an die + // Datei anhängen => spart Speicher + if(replaceSearchTree) + Files.createFile(searchTreeFile, treeString); + else + Files.appendFile(searchTreeFile, treeString); + } + + // Anzahl Schleifendurchläufe + loop++; + + if(!quiet) + System.out.println("--- loop " + loop + " finished ---"); + + } + + // Suchbaum in Datei schreiben +// if(writeSearchTree) +// Files.createFile(searchTreeFile, searchTree); + + // Ergebnisausgabe + /* + System.out.println("candidate set:"); + for(Node n : candidates) { + System.out.println(n); + }*/ + + // Set<Concept> solutionsSorted = new TreeSet(conceptComparator); + // solutionsSorted.addAll(solutions); + + // System.out.println("retrievals:"); + // for(Concept c : ReasoningService.retrievals) { + // System.out.println(c); + // } + + if(solutionFound) { + System.out.println(); + System.out.println("solutions:"); + for(Concept c : solutions) { + System.out.println(" " + c + " (length " + c.getLength() +", depth " + c.getDepth() + ")"); + } + } + System.out.println("horizontal expansion: " + minimumHorizontalExpansion + " to " + maximumHorizontalExpansion); + System.out.println("size of candidate set: " + candidates.size()); + printStatistics(true); + + if(stop) + System.out.println("Algorithm stopped."); + else + System.out.println("Algorithm terminated succesfully."); + } + + private void extendNodeProper(ExampleBasedNode node, int maxLength) { + // Rekursionsanfang ist das Konzept am Knoten selbst; danach wird der Operator + // so lange darauf angewandt bis alle proper refinements bis zu maxLength + // gefunden wurden + long propCalcNsStart = System.nanoTime(); + + if(writeSearchTree) + expandedNodes.add(node); + + if(node.getChildren().size()>maxNrOfChildren) + maxNrOfChildren = node.getChildren().size(); + + // Knoten in instabiler Menge muss aktualisiert werden + // => wird jetzt schon vom Algorithmus entfernt + /* + boolean remove = candidates.remove(node); + + if(!remove) { + 
System.out.println(candidates); + System.out.println(candidatesStable); + System.out.println(node); + + throw new RuntimeException("remove failed"); + }*/ + + extendNodeProper(node, node.getConcept(), maxLength, 0); + node.setHorizontalExpansion(maxLength); + + // wird jetzt schon im Kernalgorithmus hinzugefügt + /* + boolean add = candidates.add(node); + if(!add) { + throw new RuntimeException("add failed"); + }*/ + + // Knoten wird entfernt und wieder hinzugefügt, da sich seine + // Position geändert haben könnte => geht noch nicht wg. ConcurrentModification + // falls Knoten wg. min. horiz. exp. expandiert werden + // candidates.remove(node); + // candidates.add(node); + propernessCalcTimeNs += (System.nanoTime()-propCalcNsStart); + } + + + + // für alle proper refinements von concept bis maxLength werden Kinderknoten + // für node erzeugt; + // recDepth dient nur zur Protokollierung + private void extendNodeProper(ExampleBasedNode node, Concept concept, int maxLength, int recDepth) { + + // führe Methode nicht aus, wenn Algorithmus gestoppt wurde (alle rekursiven Funktionsaufrufe + // werden nacheinander abgebrochen, so dass ohne weitere Reasoninganfragen relativ schnell beendet wird) + if(stop) + return; + + if(recDepth > maxRecDepth) + maxRecDepth = recDepth; + + // Refinements berechnen => hier dürfen dürfen refinements <= horizontal expansion + // des Konzepts nicht gelöscht werden! 
+ long refinementCalcTimeNsStart = System.nanoTime(); + Set<Concept> refinements = operator.refine(concept, maxLength, null); + refinementCalcTimeNs += System.nanoTime() - refinementCalcTimeNsStart; + + if(refinements.size()>maxNrOfRefinements) + maxNrOfRefinements = refinements.size(); + + long childConceptsDeletionTimeNsStart = System.nanoTime(); + // entferne aus den refinements alle Konzepte, die bereits Kinder des Knotens sind + // for(Node n : node.getChildren()) { + // refinements.remove(n.getConcept()); + // } + + // das ist viel schneller, allerdings bekommt man ein anderes candidate set(??) + refinements.removeAll(node.getChildConcepts()); + + childConceptsDeletionTimeNs += System.nanoTime() - childConceptsDeletionTimeNsStart; + + long evaluateSetCreationTimeNsStart = System.nanoTime(); + + // alle Konzepte, die länger als horizontal expansion sind, müssen ausgewertet + // werden + Set<Concept> toEvaluateConcepts = new TreeSet<Concept>(conceptComparator); + Iterator<Concept> it = refinements.iterator(); + // for(Concept refinement : refinements) { + while(it.hasNext()) { + Concept refinement = it.next(); + if(refinement.getLength()>node.getHorizontalExpansion()) { + // TODO: an dieser Stelle könnte man Algorithmen ansetzen lassen, die + // versuchen properness-Anfragen zu vermeiden: + // 1. Konzept kürzen und schauen, ob es Mutterkonzept entspricht + // 2. Blacklist, die überprüft, ob Konzept too weak ist + // (dann ist es auch proper) + + // sagt aus, ob festgestellt wurde, ob refinement proper ist + // (sagt nicht aus, dass das refinement proper ist!) + boolean propernessDetected = false; + + // 1. 
short concept construction + if(useShortConceptConstruction) { + // kurzes Konzept konstruieren + Concept shortConcept = ConceptTransformation.getShortConcept(refinement, conceptComparator); + int n = conceptComparator.compare(shortConcept, concept); + + // Konzepte sind gleich also Refinement improper + if(n==0) { + propernessTestsAvoidedByShortConceptConstruction++; + propernessDetected = true; + } + } + + // 2. too weak test + if(!propernessDetected && useTooWeakList) { + if(refinement instanceof MultiConjunction) { + boolean tooWeakElement = containsTooWeakElement((MultiConjunction)refinement); + if(tooWeakElement) { + propernessTestsAvoidedByTooWeakList++; + conceptTestsTooWeakList++; + propernessDetected = true; + tooWeakList.add(refinement); + + // Knoten wird direkt erzeugt (es ist buganfällig zwei Plätze + // zu haben, an denen Knoten erzeugt werden, aber es erscheint + // hier am sinnvollsten) + properRefinements.add(refinement); + tooWeakList.add(refinement); + + ExampleBasedNode newNode = new ExampleBasedNode(refinement); + newNode.setHorizontalExpansion(refinement.getLength()-1); + newNode.setTooWeak(true); + newNode.setQualityEvaluationMethod(ExampleBasedNode.QualityEvaluationMethod.TOO_WEAK_LIST); + node.addChild(newNode); + + // Refinement muss gelöscht werden, da es proper ist + it.remove(); + } + } + } + + // properness konnte nicht vorher ermittelt werden + if(!propernessDetected) + toEvaluateConcepts.add(refinement); + + + } + } + evaluateSetCreationTimeNs += System.nanoTime() - evaluateSetCreationTimeNsStart; + + // System.out.println(toEvaluateConcepts.size()); + + Set<Concept> improperConcepts = null; + if(toEvaluateConcepts.size()>0) { + // Test aller Konzepte auf properness (mit DIG in nur einer Anfrage) + long propCalcReasoningStart = System.nanoTime(); + improperConcepts = rs.subsumes(toEvaluateConcepts, concept); + propernessTestsReasoner+=toEvaluateConcepts.size(); + // boolean isProper = 
!learningProblem.getReasoningService().subsumes(refinement, concept); + propernessCalcReasoningTimeNs += System.nanoTime() - propCalcReasoningStart; + } + + long improperConceptsRemovalTimeNsStart = System.nanoTime(); + // die improper Konzepte werden von den auszuwertenden gelöscht, d.h. + // alle proper concepts bleiben übrig (einfache Umbenennung) + if(improperConcepts != null) + toEvaluateConcepts.removeAll(improperConcepts); + Set<Concept> properConcepts = toEvaluateConcepts; + // alle proper concepts von refinements löschen + refinements.removeAll(properConcepts); + improperConceptsRemovalTimeNs += System.nanoTime() - improperConceptsRemovalTimeNsStart; + + for(Concept refinement : properConcepts) { + long redundancyCheckTimeNsStart = System.nanoTime(); + boolean nonRedundant = properRefinements.add(refinement); + redundancyCheckTimeNs += System.nanoTime() - redundancyCheckTimeNsStart; + + if(!nonRedundant) + redundantConcepts++; + + // es wird nur ein neuer Knoten erzeugt, falls das Konzept nicht + // schon existiert + if(nonRedundant) { + + // Knoten erzeugen + ExampleBasedNode newNode = new ExampleBasedNode(refinement); + // die -1 ist wichtig, da sonst keine gleich langen Refinements + // für den neuen Knoten erlaubt wären z.B. 
person => male + newNode.setHorizontalExpansion(refinement.getLength()-1); + + + // hier finden Tests statt, die Retrieval-Anfrage vermeiden sollen + /* + Integer n = evaluationCache.get(concept); + // Konzept gefunden + if(n!=null) { + // Knoten erzeugen + Node newNode = new Node(refinement); + newNode.setHorizontalExpansion(refinement.getLength()-1); + node.addChild(newNode); + + // too weak + if(n==-1) { + newNode.setTooWeak(true); + // nicht too weak + } else { + // feststellen, ob proper => geht so nicht + // gleiche covered negatives bedeutet nicht improper + boolean proper = (n==node.getCoveredNegativeExamples()); + newNode.setCoveredNegativeExamples(n); + + } + // Konzept nicht gefunden => muss ausgewertet werden + } else { + toEvaluateConcepts.add(refinement); + } + */ + + boolean qualityKnown = false; + int quality = -2; + + // overly general list verwenden + if(useOverlyGeneralList && refinement instanceof MultiDisjunction) { + if(containsOverlyGeneralElement((MultiDisjunction)refinement)) { + conceptTestsOverlyGeneralList++; + quality = getNumberOfNegatives(); + qualityKnown = true; + newNode.setQualityEvaluationMethod(ExampleBasedNode.QualityEvaluationMethod.OVERLY_GENERAL_LIST); + } + } + + // Qualität des Knotens auswerten + if(!qualityKnown) { + long propCalcReasoningStart2 = System.nanoTime(); + conceptTestsReasoner++; + quality = coveredNegativesOrTooWeak(refinement); + propernessCalcReasoningTimeNs += System.nanoTime() - propCalcReasoningStart2; + newNode.setQualityEvaluationMethod(ExampleBasedNode.QualityEvaluationMethod.REASONER); + } + + if(quality == -1) { + newNode.setTooWeak(true); + // Blacklist für too weak concepts + tooWeakList.add(refinement); + } else { + // Lösung gefunden + if(quality == 0) { + solutionFound = true; + solutions.add(refinement); + } + + newNode.setCoveredNegativeExamples(quality); + newCandidates.add(newNode); + // candidates.add(newNode); + // candidatesStable.add(newNode); + + + if(quality == 
getNumberOfNegatives()) + overlyGeneralList.add(refinement); + + // System.out.print("."); + } + + node.addChild(newNode); + } + } + + + /* + Iterator<Concept> it = refinements.iterator(); + while(it.hasNext()) { + Concept refinement = it.next(); + if(refinement.getLength()>node.getHorizontalExpansion()) { + // Test auf properness + long propCalcReasoningStart = System.nanoTime(); + boolean isProper = !learningProblem.getReasoningService().subsumes(refinement, concept); + propernessCalcReasoningTimeNs += System.nanoTime() - propCalcReasoningStart; + + if(isProper) { + long redundancyCheckTimeNsStart = System.nanoTime(); + boolean nonRedundant = properRefinements.add(refinement); + redundancyCheckTimeNs += System.nanoTime() - redundancyCheckTimeNsStart; + + if(!nonRedundant) + redundantConcepts++; + + // es wird nur ein neuer Knoten erzeugt, falls das Konzept nicht + // schon existiert + if(nonRedundant) { + + // Knoten erzeugen + Node newNode = new Node(refinement); + // die -1 ist wichtig, da sonst keine gleich langen Refinements + // für den neuen Knoten erlaubt wären z.B. 
person => male + newNode.setHorizontalExpansion(refinement.getLength()-1); + node.addChild(newNode); + + // Qualität des Knotens auswerten + long propCalcReasoningStart2 = System.nanoTime(); + int quality = learningProblem.coveredNegativeExamplesOrTooWeak(refinement); + propernessCalcReasoningTimeNs += System.nanoTime() - propCalcReasoningStart2; + + if(quality == -1) { + newNode.setTooWeak(true); + } else { + // Lösung gefunden + if(quality == 0) { + solutionFound = true; + solutions.add(refinement); + } + + newNode.setCoveredNegativeExamples(quality); + newCandidates.add(newNode); + + // System.out.print("."); + } + } + + // jedes proper Refinement wird gelöscht + it.remove(); + + } + } + } + */ + + + + // es sind jetzt noch alle Konzepte übrig, die improper refinements sind + // auf jedem dieser Konzepte wird die Funktion erneut aufgerufen, da sich + // proper refinements ergeben könnten + for(Concept refinement : refinements) { + // for(int i=0; i<=recDepth; i++) + // System.out.print(" "); + // System.out.println("call: " + refinement + " [maxLength " + maxLength + "]"); + extendNodeProper(node, refinement, maxLength, recDepth+1); + // for(int i=0; i<=recDepth; i++) + // System.out.print(" "); + // System.out.println("finished: " + refinement + " [maxLength " + maxLength + "]"); + } + } + + private void printStatistics(boolean finalStats) { + // TODO: viele Tests haben ergeben, dass man nie 100% mit der Zeitmessung abdecken + // kann (zum einen weil Stringausgabe verzögert erfolgt und zum anderen weil + // Funktionsaufrufe, garbage collection, Zeitmessung selbst auch Zeit benötigt); + // es empfiehlt sich folgendes Vorgehen: + // - Messung der Zeit eines Loops im Algorithmus + // - Messung der Zeit für alle node extensions innerhalb eines Loops + // => als Normalisierungsbasis empfehlen sich dann die Loopzeit statt + // Algorithmuslaufzeit + // ... 
momentan kann es aber auch erstmal so lassen + + long algorithmRuntime = System.nanoTime() - algorithmStartTime; + + if(!finalStats) { + // Refinementoperator auf Konzept anwenden + String bestNodeString = "currently best node: " + candidatesStable.last(); + // searchTree += bestNodeString + "\n"; + System.out.println(bestNodeString); + String expandedNodeString = "next expanded node: " + candidates.last(); + // searchTree += expandedNodeString + "\n"; + System.out.println(expandedNodeString); + System.out.println("algorithm runtime " + Helper.prettyPrintNanoSeconds(algorithmRuntime)); + String expansionString = "horizontal expansion: " + minimumHorizontalExpansion + " to " + maximumHorizontalExpansion; + // searchTree += expansionString + "\n"; + System.out.println(expansionString); + System.out.println("size of candidate set: " + candidates.size()); + // System.out.println("properness max recursion depth: " + maxRecDepth); + // System.out.println("max. number of one-step refinements: " + maxNrOfRefinements); + // System.out.println("max. 
number of children of a node: " + maxNrOfChildren); + } + + if(showBenchmarkInformation) { + + + long reasoningTime = rs.getOverallReasoningTimeNs(); + double reasoningPercentage = 100 * reasoningTime/(double)algorithmRuntime; + long propWithoutReasoning = propernessCalcTimeNs-propernessCalcReasoningTimeNs; + double propPercentage = 100 * propWithoutReasoning/(double)algorithmRuntime; + double deletionPercentage = 100 * childConceptsDeletionTimeNs/(double)algorithmRuntime; + long subTime = rs.getSubsumptionReasoningTimeNs(); + double subPercentage = 100 * subTime/(double)algorithmRuntime; + double refinementPercentage = 100 * refinementCalcTimeNs/(double)algorithmRuntime; + double redundancyCheckPercentage = 100 * redundancyCheckTimeNs/(double)algorithmRuntime; + double evaluateSetCreationTimePercentage = 100 * evaluateSetCreationTimeNs/(double)algorithmRuntime; + double improperConceptsRemovalTimePercentage = 100 * improperConceptsRemovalTimeNs/(double)algorithmRuntime; + double mComputationTimePercentage = 100 * operator.mComputationTimeNs/(double)algorithmRuntime; + double topComputationTimePercentage = 100 * operator.topComputationTimeNs/(double)algorithmRuntime; + double cleanTimePercentage = 100 * ConceptTransformation.cleaningTimeNs/(double)algorithmRuntime; + double onnfTimePercentage = 100 * ConceptTransformation.onnfTimeNs/(double)algorithmRuntime; + double shorteningTimePercentage = 100 * ConceptTransformation.shorteningTimeNs/(double)algorithmRuntime; + + // nur temporär + double someTimePercentage = 100 * someTimeNs/(double)algorithmRuntime; + + System.out.println("reasoning percentage: " + df.format(reasoningPercentage) + "%"); + System.out.println(" subsumption check time: " + df.format(subPercentage) + "%"); + System.out.println("proper calculation percentage (wo. 
reasoning): " + df.format(propPercentage) + "%"); + System.out.println(" deletion time percentage: " + df.format(deletionPercentage) + "%"); + System.out.println(" refinement calculation percentage: " + df.format(refinementPercentage) + "%"); + System.out.println(" some time percentage: " + df.format(someTimePercentage) + "% " + Helper.prettyPrintNanoSeconds(someTimeNs) + " " + someCount + " times"); + System.out.println(" m calculation percentage: " + df.format(mComputationTimePercentage) + "%"); + System.out.println(" top calculation percentage: " + df.format(topComputationTimePercentage) + "%"); + System.out.println(" redundancy check percentage: " + df.format(redundancyCheckPercentage) + "%"); + System.out.println(" evaluate set creation time percentage: " + df.format(evaluateSetCreationTimePercentage) + "%"); + System.out.println(" improper concepts removal time percentage: " + df.format(improperConceptsRemovalTimePercentage) + "%"); + System.out.println("clean time percentage: " + df.format(cleanTimePercentage) + "%"); + System.out.println("onnf time percentage: " + df.format(onnfTimePercentage) + "%"); + System.out.println("shortening time percentage: " + df.format(shorteningTimePercentage) + "%"); + } + System.out.println("properness tests (reasoner/short concept/too weak list): " + propernessTestsReasoner + "/" + propernessTestsAvoidedByShortConceptConstruction + + "/" + propernessTestsAvoidedByTooWeakList); + System.out.println("concept tests (reasoner/too weak list/overly general list/redundant concepts): " + conceptTestsReasoner + "/" + + conceptTestsTooWeakList + "/" + conceptTestsOverlyGeneralList + "/" + redundantConcepts); + } + + private int coveredNegativesOrTooWeak(Concept concept) { + if(posOnly) + return posOnlyLearningProblem.coveredPseudoNegativeExamplesOrTooWeak(concept); + else + return learningProblem.coveredNegativeExamplesOrTooWeak(concept); + } + + private int getNumberOfNegatives() { + if(posOnly) + return 
posOnlyLearningProblem.getPseudoNegatives().size(); + else + return learningProblem.getNegativeExamples().size(); + } + + private boolean containsTooWeakElement(MultiConjunction mc) { + for(Concept child : mc.getChildren()) { + if(tooWeakList.contains(child)) + return true; + } + return false; + } + + private boolean containsOverlyGeneralElement(MultiDisjunction md) { + for(Concept child : md.getChildren()) { + if(overlyGeneralList.contains(child)) + return true; + } + return false; + } + + public void stop() { + + } + + public Concept getBestSolution() { + return candidatesStable.last().getConcept(); + } + + public synchronized List<Concept> getBestSolutions(int nrOfSolutions) { + List<Concept> best = new LinkedList<Concept>(); + int i=0; + for(ExampleBasedNode n : candidatesStable.descendingSet()) { + best.add(n.getConcept()); + if(i==nrOfSolutions) + return best; + i++; + } + return best; + } + + public Score getSolutionScore() { + if(posOnly) + return posOnlyLearningProblem.computeScore(getBestSolution()); + else + return learningProblem.computeScore(getBestSolution()); + } + + + +} Added: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java 2008-01-23 12:05:44 UTC (rev 418) @@ -0,0 +1,93 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. 
+ * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ + +package org.dllearner.algorithms.refexamples; + +import org.dllearner.utilities.ConceptComparator; + +/** + * This heuristic compares two nodes by computing a score + * using the number of covered negatives and the horizontal + * expansion factor of a node as input. Using this score + * it decides which one of the nodes seems to be more promising. + * The heuristic is flexible, because it offers a tradeoff + * between accurary and horizontal expansion (concept length). + * In contrast to the lexicographic heuristic this means that + * it sometimes prefers worse classifiers with low horizontal + * expansion over a better classifier with high horizontal + * expansion. + * + * It can be configured by using the "percentPerLenghtUnit" + * constructor argument. A higher + * value means that the algorithm is more likely to search in + * unexplored areas (= low horizontal expansion) of the search + * space vs. looking in promising but already explored (= high + * horizontal expansion) areas of the search space. 
+ * + * @author Jens Lehmann + * + */ +public class FlexibleHeuristic implements ExampleBasedHeuristic { + + // Vergleich von Konzepten, falls alle anderen Kriterien fehlschlagen + private ConceptComparator conceptComparator = new ConceptComparator(); + private int nrOfNegativeExamples; + private double percentPerLengthUnit; + + // 5% sind eine Verlängerung um 1 wert + // double percentPerLengthUnit = 0.05; + + public FlexibleHeuristic(int nrOfNegativeExamples, double percentPerLengthUnit) { + this.nrOfNegativeExamples = nrOfNegativeExamples; + this.percentPerLengthUnit = percentPerLengthUnit; + } + + // implementiert einfach die Definition in der Diplomarbeit + public int compare(ExampleBasedNode n1, ExampleBasedNode n2) { + + // sicherstellen, dass Qualität ausgewertet wurde + if(n1.isQualityEvaluated() && n2.isQualityEvaluated() && !n1.isTooWeak() && !n2.isTooWeak()) { + + // alle scores sind negativ, größere scores sind besser + double score1 = -n1.getCoveredNegativeExamples()/(double)nrOfNegativeExamples; + score1 -= percentPerLengthUnit * n1.getConcept().getLength(); + + double score2 = -n2.getCoveredNegativeExamples()/(double)nrOfNegativeExamples; + score2 -= percentPerLengthUnit * n2.getConcept().getLength(); + + double diff = score1 - score2; + + if(diff>0) + return 1; + else if(diff<0) + return -1; + else + return conceptComparator.compare(n1.getConcept(), n2.getConcept()); + } + + throw new RuntimeException("Cannot compare nodes, which have no evaluated quality or are too weak."); + } + + @Override + public boolean equals(Object o) { + return (o instanceof FlexibleHeuristic); + } + +} Added: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java 2008-01-23 12:05:44 UTC 
(rev 418) @@ -0,0 +1,115 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * th... [truncated message content] |
From: <jen...@us...> - 2008-01-23 12:13:26
|
Revision: 419 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=419&view=rev Author: jenslehmann Date: 2008-01-23 04:13:23 -0800 (Wed, 23 Jan 2008) Log Message: ----------- small fixes Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/cli/Start.java Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-01-23 12:05:44 UTC (rev 418) +++ trunk/lib/components.ini 2008-01-23 12:13:23 UTC (rev 419) @@ -17,4 +17,5 @@ org.dllearner.algorithms.RandomGuesser org.dllearner.algorithms.BruteForceLearner org.dllearner.algorithms.refinement.ROLearner -org.dllearner.algorithms.refinement2.ExampleBasedROLearnerComponent \ No newline at end of file +org.dllearner.algorithms.refinement2.ExampleBasedROLearnerComponent +org.dllearner.algorithms.gp.GP \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-23 12:05:44 UTC (rev 418) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-23 12:13:23 UTC (rev 419) @@ -187,6 +187,8 @@ Class<? 
extends LearningAlgorithm> laClass = null; if (algorithmOption == null || algorithmOption.getStringValue().equals("refinement")) laClass = ROLearner.class; + else if(algorithmOption.getStringValue().equals("refexamples")) + laClass = ExampleBasedROComponent.class; else if(algorithmOption.getStringValue().equals("gp")) laClass = GP.class; else if(algorithmOption.getStringValue().equals("bruteForce")) @@ -250,7 +252,7 @@ componentPrefixMapping.put(PosNegDefinitionLP.class, "posNegDefinitionLP"); // learning algorithms componentPrefixMapping.put(ROLearner.class, "refinement"); - componentPrefixMapping.put(ExampleBasedROComponent.class, "refinement2"); + componentPrefixMapping.put(ExampleBasedROComponent.class, "refexamples"); componentPrefixMapping.put(GP.class, "gp"); return componentPrefixMapping; } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-01-24 08:57:29
|
Revision: 423 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=423&view=rev Author: jenslehmann Date: 2008-01-24 00:57:25 -0800 (Thu, 24 Jan 2008) Log Message: ----------- added example for Tilo Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/cli/Start.java trunk/src/dl-learner/org/dllearner/gui/ComponentRetrievalTest.java Added Paths: ----------- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-01-24 01:10:00 UTC (rev 422) +++ trunk/lib/components.ini 2008-01-24 08:57:25 UTC (rev 423) @@ -17,5 +17,5 @@ org.dllearner.algorithms.RandomGuesser org.dllearner.algorithms.BruteForceLearner org.dllearner.algorithms.refinement.ROLearner -org.dllearner.algorithms.refinement2.ExampleBasedROLearnerComponent +org.dllearner.algorithms.refexamples.ExampleBasedROLComponent org.dllearner.algorithms.gp.GP \ No newline at end of file Deleted: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java 2008-01-24 01:10:00 UTC (rev 422) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java 2008-01-24 08:57:25 UTC (rev 423) @@ -1,325 +0,0 @@ -/** - * Copyright (C) 2007-2008, Jens Lehmann - * - * This file is part of DL-Learner. - * - * DL-Learner is free software; you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation; either version 3 of the License, or - * (at your option) any later version. 
- * - * DL-Learner is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - * - */ - -package org.dllearner.algorithms.refexamples; - -import java.io.File; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; - -import org.dllearner.algorithms.refinement.RhoDown; -import org.dllearner.core.LearningAlgorithm; -import org.dllearner.core.LearningProblem; -import org.dllearner.core.ReasoningService; -import org.dllearner.core.Score; -import org.dllearner.core.config.BooleanConfigOption; -import org.dllearner.core.config.CommonConfigMappings; -import org.dllearner.core.config.CommonConfigOptions; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.DoubleConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.config.StringConfigOption; -import org.dllearner.core.dl.AtomicConcept; -import org.dllearner.core.dl.AtomicRole; -import org.dllearner.core.dl.Concept; -import org.dllearner.learningproblems.PosNegLP; -import org.dllearner.learningproblems.PosOnlyDefinitionLP; -import org.dllearner.utilities.Files; -import org.dllearner.utilities.Helper; - -/** - * The DL-Learner learning algorithm component for the example - * based refinement operator approach. It handles all - * configuration options, creates the corresponding objects and - * passes them to the actual refinement operator, heuristic, and - * learning algorithm implementations. - * - * Note: The component is not working yet. 
- * - * Note: The options supported by the ROLearner component and this - * one are not equal. Options that have been dropped for now: - * - horizontal expansion factor: The goal of the algorithm will - * be to (hopefully) be able to learn long and complex concepts - * more efficiently. - * A horizontal expansion factor has its benefits, but limits - * the length of concepts learnable in reasonable time to - * about 15 with its default value of 0.6 and a small sized - * background knowledge base. We hope to get more fine-grained - * control of whether it makes sense to extend a node with - * more sophisticated heuristics. - * Dropping the horizontal expansion factor means that the - * completeness of the algorithm depends on the heuristic. - * - * @author Jens Lehmann - * - */ -public class ExampleBasedROComponent extends LearningAlgorithm { - - // actual algorithm - private ExampleBasedROLearner algorithm; - - // learning problem to solve and background knowledge - private ReasoningService rs; - private LearningProblem learningProblem; - - // configuration options - private boolean writeSearchTree; - private File searchTreeFile; - private boolean replaceSearchTree = false; - private static String defaultSearchTreeFile = "log/searchTree.txt"; - private String heuristic = "lexicographic"; - Set<AtomicConcept> allowedConcepts; - Set<AtomicRole> allowedRoles; - Set<AtomicConcept> ignoredConcepts; - Set<AtomicRole> ignoredRoles; - // these are computed as the result of the previous four settings - Set<AtomicConcept> usedConcepts; - Set<AtomicRole> usedRoles; - private boolean applyAllFilter = true; - private boolean applyExistsFilter = true; - private boolean useTooWeakList = true; - private boolean useOverlyGeneralList = true; - private boolean useShortConceptConstruction = true; - private boolean improveSubsumptionHierarchy = true; - private boolean useAllConstructor = true; - private boolean useExistsConstructor = true; - private boolean useNegation = true; - - // 
Variablen zur Einstellung der Protokollierung - // boolean quiet = false; - boolean showBenchmarkInformation = false; - // boolean createTreeString = false; - // String searchTree = new String(); - - // Konfiguration des Algorithmus - // Faktor für horizontale Erweiterung (notwendig für completeness) - // double horizontalExpansionFactor = 0.6; - - // soll später einen Operator und eine Heuristik entgegennehmen - // public ROLearner(LearningProblem learningProblem, LearningProblem learningProblem2) { - public ExampleBasedROComponent(PosNegLP learningProblem, ReasoningService rs) { - this.learningProblem = learningProblem; - this.rs = rs; - } - - public ExampleBasedROComponent(PosOnlyDefinitionLP learningProblem, ReasoningService rs) { - this.learningProblem = learningProblem; - this.rs = rs; - } - - public static Collection<Class<? extends LearningProblem>> supportedLearningProblems() { - Collection<Class<? extends LearningProblem>> problems = new LinkedList<Class<? extends LearningProblem>>(); - problems.add(PosNegLP.class); - problems.add(PosOnlyDefinitionLP.class); - return problems; - } - - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(new BooleanConfigOption("writeSearchTree", "specifies whether to write a search tree", false)); - options.add(new StringConfigOption("searchTreeFile","file to use for the search tree", defaultSearchTreeFile)); - options.add(new BooleanConfigOption("replaceSearchTree","specifies whether to replace the search tree in the log file after each run or append the new search tree", false)); - StringConfigOption heuristicOption = new StringConfigOption("heuristic", "specifiy the heuristic to use", "lexicographic"); - heuristicOption.setAllowedValues(new String[] {"lexicographic", "flexible"}); - options.add(heuristicOption); - options.add(new BooleanConfigOption("applyAllFilter", "usage of equivalence ALL R.C AND ALL R.D = ALL R.(C 
AND D)", true)); - options.add(new BooleanConfigOption("applyExistsFilter", "usage of equivalence EXISTS R.C OR EXISTS R.D = EXISTS R.(C OR D)", true)); - options.add(new BooleanConfigOption("useTooWeakList", "try to filter out too weak concepts without sending them to the reasoner", true)); - options.add(new BooleanConfigOption("useOverlyGeneralList", "try to find overly general concept without sending them to the reasoner", true)); - options.add(new BooleanConfigOption("useShortConceptConstruction", "shorten concept to see whether they already exist", true)); - DoubleConfigOption horizExp = new DoubleConfigOption("horizontalExpansionFactor", "horizontal expansion factor (see publication for description)", 0.6); - horizExp.setLowerLimit(0.0); - horizExp.setUpperLimit(1.0); - options.add(horizExp); - options.add(new BooleanConfigOption("improveSubsumptionHierarchy", "simplify subsumption hierarchy to reduce search space (see publication for description)", true)); - // allowed/ignored concepts/roles could also be a reasoner option (?) 
- options.add(CommonConfigOptions.allowedConcepts()); - options.add(CommonConfigOptions.ignoredConcepts()); - options.add(CommonConfigOptions.allowedRoles()); - options.add(CommonConfigOptions.ignoredRoles()); - options.add(CommonConfigOptions.useAllConstructor()); - options.add(CommonConfigOptions.useExistsConstructor()); - options.add(CommonConfigOptions.useNegation()); - return options; - } - - /* (non-Javadoc) - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - @SuppressWarnings({"unchecked"}) - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String name = entry.getOptionName(); - if(name.equals("writeSearchTree")) - writeSearchTree = (Boolean) entry.getValue(); - else if(name.equals("searchTreeFile")) - searchTreeFile = new File((String)entry.getValue()); - else if(name.equals("replaceSearchTree")) - replaceSearchTree = (Boolean) entry.getValue(); - else if(name.equals("heuristic")) { - String value = (String) entry.getValue(); - if(value.equals("lexicographic")) - heuristic = "lexicographic"; - else - heuristic = "flexible"; - } else if(name.equals("allowedConcepts")) { - allowedConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); - } else if(name.equals("allowedRoles")) { - allowedRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); - } else if(name.equals("ignoredConcepts")) { - ignoredConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); - } else if(name.equals("ignoredRoles")) { - ignoredRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); - } else if(name.equals("applyAllFilter")) { - applyAllFilter = (Boolean) entry.getValue(); - } else if(name.equals("applyExistsFilter")) { - applyExistsFilter = (Boolean) entry.getValue(); - } else if(name.equals("useTooWeakList")) { - useTooWeakList = (Boolean) entry.getValue(); - } else 
if(name.equals("useOverlyGeneralList")) { - useOverlyGeneralList = (Boolean) entry.getValue(); - } else if(name.equals("useShortConceptConstruction")) { - useShortConceptConstruction = (Boolean) entry.getValue(); - } else if(name.equals("improveSubsumptionHierarchy")) { - improveSubsumptionHierarchy = (Boolean) entry.getValue(); - } else if(name.equals("useAllConstructor")) { - useAllConstructor = (Boolean) entry.getValue(); - } else if(name.equals("useExistsConstructor")) { - useExistsConstructor = (Boolean) entry.getValue(); - } else if(name.equals("useNegation")) { - useNegation = (Boolean) entry.getValue(); - } - - } - - /* (non-Javadoc) - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - if(searchTreeFile == null) - searchTreeFile = new File(defaultSearchTreeFile); - - if(writeSearchTree) - Files.clearFile(searchTreeFile); - - // adjust heuristic - ExampleBasedHeuristic algHeuristic; - - if(heuristic == "lexicographic") - algHeuristic = new LexicographicHeuristic(); - else { - if(learningProblem instanceof PosOnlyDefinitionLP) { - throw new RuntimeException("does not work with positive examples only yet"); - } - algHeuristic = null; - // algHeuristic = new FlexibleHeuristic(learningProblem.getNegativeExamples().size(), learningProblem.getPercentPerLengthUnit()); - } - - // compute used concepts/roles from allowed/ignored - // concepts/roles - if(allowedConcepts != null) { - // sanity check to control if no non-existing concepts are in the list - Helper.checkConcepts(rs, allowedConcepts); - usedConcepts = allowedConcepts; - } else if(ignoredConcepts != null) { - usedConcepts = Helper.computeConceptsUsingIgnoreList(rs, ignoredConcepts); - } else { - usedConcepts = Helper.computeConcepts(rs); - } - - if(allowedRoles != null) { - Helper.checkRoles(rs, allowedRoles); - usedRoles = allowedRoles; - } else if(ignoredRoles != null) { - Helper.checkRoles(rs, ignoredRoles); - usedRoles = Helper.difference(rs.getAtomicRoles(), 
ignoredRoles); - } else { - usedRoles = rs.getAtomicRoles(); - } - - // prepare subsumption and role hierarchies, because they are needed - // during the run of the algorithm - rs.prepareSubsumptionHierarchy(usedConcepts); - if(improveSubsumptionHierarchy) - rs.getSubsumptionHierarchy().improveSubsumptionHierarchy(); - rs.prepareRoleHierarchy(usedRoles); - - // create a refinement operator and pass all configuration - // variables to it - RhoDown operator = new RhoDown( - rs, - applyAllFilter, - applyExistsFilter, - useAllConstructor, - useExistsConstructor, - useNegation - ); - - // create an algorithm object and pass all configuration - // options to it - algorithm = new ExampleBasedROLearner( - learningProblem, - operator, - algHeuristic, - usedConcepts, - usedRoles, - writeSearchTree, - replaceSearchTree, - searchTreeFile, - useTooWeakList, - useOverlyGeneralList, - useShortConceptConstruction - ); - } - - public static String getName() { - return "example driven refinement operator based learning algorithm [not working]"; - } - - @Override - public void start() { - algorithm.start(); - } - - @Override - public Score getSolutionScore() { - return algorithm.getSolutionScore(); - } - - @Override - public Concept getBestSolution() { - return algorithm.getBestSolution(); - } - - @Override - public synchronized List<Concept> getBestSolutions(int nrOfSolutions) { - return algorithm.getBestSolutions(nrOfSolutions); - } - - @Override - public void stop() { - algorithm.stop(); - } - -} Copied: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java (from rev 421, trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROComponent.java) =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java 2008-01-24 08:57:25 UTC (rev 423) 
@@ -0,0 +1,325 @@ +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ + +package org.dllearner.algorithms.refexamples; + +import java.io.File; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; + +import org.dllearner.algorithms.refinement.RhoDown; +import org.dllearner.core.LearningAlgorithm; +import org.dllearner.core.LearningProblem; +import org.dllearner.core.ReasoningService; +import org.dllearner.core.Score; +import org.dllearner.core.config.BooleanConfigOption; +import org.dllearner.core.config.CommonConfigMappings; +import org.dllearner.core.config.CommonConfigOptions; +import org.dllearner.core.config.ConfigEntry; +import org.dllearner.core.config.ConfigOption; +import org.dllearner.core.config.DoubleConfigOption; +import org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.core.dl.AtomicConcept; +import org.dllearner.core.dl.AtomicRole; +import org.dllearner.core.dl.Concept; +import org.dllearner.learningproblems.PosNegLP; +import org.dllearner.learningproblems.PosOnlyDefinitionLP; +import org.dllearner.utilities.Files; +import org.dllearner.utilities.Helper; + +/** + * The DL-Learner learning algorithm component for the example + 
* based refinement operator approach. It handles all + * configuration options, creates the corresponding objects and + * passes them to the actual refinement operator, heuristic, and + * learning algorithm implementations. + * + * Note: The component is not working yet. + * + * Note: The options supported by the ROLearner component and this + * one are not equal. Options that have been dropped for now: + * - horizontal expansion factor: The goal of the algorithm will + * be to (hopefully) be able to learn long and complex concepts + * more efficiently. + * A horizontal expansion factor has its benefits, but limits + * the length of concepts learnable in reasonable time to + * about 15 with its default value of 0.6 and a small sized + * background knowledge base. We hope to get more fine-grained + * control of whether it makes sense to extend a node with + * more sophisticated heuristics. + * Dropping the horizontal expansion factor means that the + * completeness of the algorithm depends on the heuristic. 
+ * + * @author Jens Lehmann + * + */ +public class ExampleBasedROLComponent extends LearningAlgorithm { + + // actual algorithm + private ExampleBasedROLearner algorithm; + + // learning problem to solve and background knowledge + private ReasoningService rs; + private LearningProblem learningProblem; + + // configuration options + private boolean writeSearchTree; + private File searchTreeFile; + private boolean replaceSearchTree = false; + private static String defaultSearchTreeFile = "log/searchTree.txt"; + private String heuristic = "lexicographic"; + Set<AtomicConcept> allowedConcepts; + Set<AtomicRole> allowedRoles; + Set<AtomicConcept> ignoredConcepts; + Set<AtomicRole> ignoredRoles; + // these are computed as the result of the previous four settings + Set<AtomicConcept> usedConcepts; + Set<AtomicRole> usedRoles; + private boolean applyAllFilter = true; + private boolean applyExistsFilter = true; + private boolean useTooWeakList = true; + private boolean useOverlyGeneralList = true; + private boolean useShortConceptConstruction = true; + private boolean improveSubsumptionHierarchy = true; + private boolean useAllConstructor = true; + private boolean useExistsConstructor = true; + private boolean useNegation = true; + + // Variablen zur Einstellung der Protokollierung + // boolean quiet = false; + boolean showBenchmarkInformation = false; + // boolean createTreeString = false; + // String searchTree = new String(); + + // Konfiguration des Algorithmus + // Faktor für horizontale Erweiterung (notwendig für completeness) + // double horizontalExpansionFactor = 0.6; + + // soll später einen Operator und eine Heuristik entgegennehmen + // public ROLearner(LearningProblem learningProblem, LearningProblem learningProblem2) { + public ExampleBasedROLComponent(PosNegLP learningProblem, ReasoningService rs) { + this.learningProblem = learningProblem; + this.rs = rs; + } + + public ExampleBasedROLComponent(PosOnlyDefinitionLP learningProblem, ReasoningService rs) { + 
this.learningProblem = learningProblem; + this.rs = rs; + } + + public static Collection<Class<? extends LearningProblem>> supportedLearningProblems() { + Collection<Class<? extends LearningProblem>> problems = new LinkedList<Class<? extends LearningProblem>>(); + problems.add(PosNegLP.class); + problems.add(PosOnlyDefinitionLP.class); + return problems; + } + + public static Collection<ConfigOption<?>> createConfigOptions() { + Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); + options.add(new BooleanConfigOption("writeSearchTree", "specifies whether to write a search tree", false)); + options.add(new StringConfigOption("searchTreeFile","file to use for the search tree", defaultSearchTreeFile)); + options.add(new BooleanConfigOption("replaceSearchTree","specifies whether to replace the search tree in the log file after each run or append the new search tree", false)); + StringConfigOption heuristicOption = new StringConfigOption("heuristic", "specifiy the heuristic to use", "lexicographic"); + heuristicOption.setAllowedValues(new String[] {"lexicographic", "flexible"}); + options.add(heuristicOption); + options.add(new BooleanConfigOption("applyAllFilter", "usage of equivalence ALL R.C AND ALL R.D = ALL R.(C AND D)", true)); + options.add(new BooleanConfigOption("applyExistsFilter", "usage of equivalence EXISTS R.C OR EXISTS R.D = EXISTS R.(C OR D)", true)); + options.add(new BooleanConfigOption("useTooWeakList", "try to filter out too weak concepts without sending them to the reasoner", true)); + options.add(new BooleanConfigOption("useOverlyGeneralList", "try to find overly general concept without sending them to the reasoner", true)); + options.add(new BooleanConfigOption("useShortConceptConstruction", "shorten concept to see whether they already exist", true)); + DoubleConfigOption horizExp = new DoubleConfigOption("horizontalExpansionFactor", "horizontal expansion factor (see publication for description)", 0.6); + 
horizExp.setLowerLimit(0.0); + horizExp.setUpperLimit(1.0); + options.add(horizExp); + options.add(new BooleanConfigOption("improveSubsumptionHierarchy", "simplify subsumption hierarchy to reduce search space (see publication for description)", true)); + // allowed/ignored concepts/roles could also be a reasoner option (?) + options.add(CommonConfigOptions.allowedConcepts()); + options.add(CommonConfigOptions.ignoredConcepts()); + options.add(CommonConfigOptions.allowedRoles()); + options.add(CommonConfigOptions.ignoredRoles()); + options.add(CommonConfigOptions.useAllConstructor()); + options.add(CommonConfigOptions.useExistsConstructor()); + options.add(CommonConfigOptions.useNegation()); + return options; + } + + /* (non-Javadoc) + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) + */ + @Override + @SuppressWarnings({"unchecked"}) + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + String name = entry.getOptionName(); + if(name.equals("writeSearchTree")) + writeSearchTree = (Boolean) entry.getValue(); + else if(name.equals("searchTreeFile")) + searchTreeFile = new File((String)entry.getValue()); + else if(name.equals("replaceSearchTree")) + replaceSearchTree = (Boolean) entry.getValue(); + else if(name.equals("heuristic")) { + String value = (String) entry.getValue(); + if(value.equals("lexicographic")) + heuristic = "lexicographic"; + else + heuristic = "flexible"; + } else if(name.equals("allowedConcepts")) { + allowedConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); + } else if(name.equals("allowedRoles")) { + allowedRoles = CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); + } else if(name.equals("ignoredConcepts")) { + ignoredConcepts = CommonConfigMappings.getAtomicConceptSet((Set<String>)entry.getValue()); + } else if(name.equals("ignoredRoles")) { + ignoredRoles = 
CommonConfigMappings.getAtomicRoleSet((Set<String>)entry.getValue()); + } else if(name.equals("applyAllFilter")) { + applyAllFilter = (Boolean) entry.getValue(); + } else if(name.equals("applyExistsFilter")) { + applyExistsFilter = (Boolean) entry.getValue(); + } else if(name.equals("useTooWeakList")) { + useTooWeakList = (Boolean) entry.getValue(); + } else if(name.equals("useOverlyGeneralList")) { + useOverlyGeneralList = (Boolean) entry.getValue(); + } else if(name.equals("useShortConceptConstruction")) { + useShortConceptConstruction = (Boolean) entry.getValue(); + } else if(name.equals("improveSubsumptionHierarchy")) { + improveSubsumptionHierarchy = (Boolean) entry.getValue(); + } else if(name.equals("useAllConstructor")) { + useAllConstructor = (Boolean) entry.getValue(); + } else if(name.equals("useExistsConstructor")) { + useExistsConstructor = (Boolean) entry.getValue(); + } else if(name.equals("useNegation")) { + useNegation = (Boolean) entry.getValue(); + } + + } + + /* (non-Javadoc) + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() { + if(searchTreeFile == null) + searchTreeFile = new File(defaultSearchTreeFile); + + if(writeSearchTree) + Files.clearFile(searchTreeFile); + + // adjust heuristic + ExampleBasedHeuristic algHeuristic; + + if(heuristic == "lexicographic") + algHeuristic = new LexicographicHeuristic(); + else { + if(learningProblem instanceof PosOnlyDefinitionLP) { + throw new RuntimeException("does not work with positive examples only yet"); + } + algHeuristic = null; + // algHeuristic = new FlexibleHeuristic(learningProblem.getNegativeExamples().size(), learningProblem.getPercentPerLengthUnit()); + } + + // compute used concepts/roles from allowed/ignored + // concepts/roles + if(allowedConcepts != null) { + // sanity check to control if no non-existing concepts are in the list + Helper.checkConcepts(rs, allowedConcepts); + usedConcepts = allowedConcepts; + } else if(ignoredConcepts != null) { + 
usedConcepts = Helper.computeConceptsUsingIgnoreList(rs, ignoredConcepts); + } else { + usedConcepts = Helper.computeConcepts(rs); + } + + if(allowedRoles != null) { + Helper.checkRoles(rs, allowedRoles); + usedRoles = allowedRoles; + } else if(ignoredRoles != null) { + Helper.checkRoles(rs, ignoredRoles); + usedRoles = Helper.difference(rs.getAtomicRoles(), ignoredRoles); + } else { + usedRoles = rs.getAtomicRoles(); + } + + // prepare subsumption and role hierarchies, because they are needed + // during the run of the algorithm + rs.prepareSubsumptionHierarchy(usedConcepts); + if(improveSubsumptionHierarchy) + rs.getSubsumptionHierarchy().improveSubsumptionHierarchy(); + rs.prepareRoleHierarchy(usedRoles); + + // create a refinement operator and pass all configuration + // variables to it + RhoDown operator = new RhoDown( + rs, + applyAllFilter, + applyExistsFilter, + useAllConstructor, + useExistsConstructor, + useNegation + ); + + // create an algorithm object and pass all configuration + // options to it + algorithm = new ExampleBasedROLearner( + learningProblem, + operator, + algHeuristic, + usedConcepts, + usedRoles, + writeSearchTree, + replaceSearchTree, + searchTreeFile, + useTooWeakList, + useOverlyGeneralList, + useShortConceptConstruction + ); + } + + public static String getName() { + return "example driven refinement operator based learning algorithm [not working]"; + } + + @Override + public void start() { + algorithm.start(); + } + + @Override + public Score getSolutionScore() { + return algorithm.getSolutionScore(); + } + + @Override + public Concept getBestSolution() { + return algorithm.getBestSolution(); + } + + @Override + public synchronized List<Concept> getBestSolutions(int nrOfSolutions) { + return algorithm.getBestSolutions(nrOfSolutions); + } + + @Override + public void stop() { + algorithm.stop(); + } + +} Modified: trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-24 01:10:00 UTC (rev 422) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-01-24 08:57:25 UTC (rev 423) @@ -41,7 +41,7 @@ import org.dllearner.algorithms.BruteForceLearner; import org.dllearner.algorithms.RandomGuesser; import org.dllearner.algorithms.gp.GP; -import org.dllearner.algorithms.refexamples.ExampleBasedROComponent; +import org.dllearner.algorithms.refexamples.ExampleBasedROLComponent; import org.dllearner.algorithms.refinement.ROLearner; import org.dllearner.core.Component; import org.dllearner.core.ComponentManager; @@ -188,7 +188,7 @@ if (algorithmOption == null || algorithmOption.getStringValue().equals("refinement")) laClass = ROLearner.class; else if(algorithmOption.getStringValue().equals("refexamples")) - laClass = ExampleBasedROComponent.class; + laClass = ExampleBasedROLComponent.class; else if(algorithmOption.getStringValue().equals("gp")) laClass = GP.class; else if(algorithmOption.getStringValue().equals("bruteForce")) @@ -252,7 +252,7 @@ componentPrefixMapping.put(PosNegDefinitionLP.class, "posNegDefinitionLP"); // learning algorithms componentPrefixMapping.put(ROLearner.class, "refinement"); - componentPrefixMapping.put(ExampleBasedROComponent.class, "refexamples"); + componentPrefixMapping.put(ExampleBasedROLComponent.class, "refexamples"); componentPrefixMapping.put(GP.class, "gp"); return componentPrefixMapping; } Modified: trunk/src/dl-learner/org/dllearner/gui/ComponentRetrievalTest.java =================================================================== --- trunk/src/dl-learner/org/dllearner/gui/ComponentRetrievalTest.java 2008-01-24 01:10:00 UTC (rev 422) +++ trunk/src/dl-learner/org/dllearner/gui/ComponentRetrievalTest.java 2008-01-24 08:57:25 UTC (rev 423) @@ -23,6 +23,10 @@ import org.dllearner.core.ComponentManager; import org.dllearner.core.KnowledgeSource; +import org.dllearner.core.config.ConfigEntry; +import 
org.dllearner.core.config.InvalidConfigOptionValueException; +import org.dllearner.core.config.StringConfigOption; +import org.dllearner.kb.OWLFile; /** * @author Jens Lehmann @@ -34,10 +38,23 @@ * @param args */ public static void main(String[] args) { + // example 1: getting all components of a given type ComponentManager cm = ComponentManager.getInstance(); List<Class<? extends KnowledgeSource>> sources = cm.getKnowledgeSources(); cm.knowledgeSource(sources.get(0)); System.out.println(sources.get(1).toString()); + + // example 2: setting a config value using a ConfigEntry object + KnowledgeSource owlFile = cm.knowledgeSource(OWLFile.class); + StringConfigOption urlOption = (StringConfigOption) cm.getConfigOption(OWLFile.class, "url"); + ConfigEntry<String> urlEntry; + try { + urlEntry = new ConfigEntry<String>(urlOption, "http://example.com"); + cm.applyConfigEntry(owlFile, urlEntry); + } catch (InvalidConfigOptionValueException e) { + e.printStackTrace(); + } + } } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-06 16:36:39
|
Revision: 502 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=502&view=rev Author: jenslehmann Date: 2008-02-06 08:36:36 -0800 (Wed, 06 Feb 2008) Log Message: ----------- - removed KAON2 from project such that DL-Learner now builds solely upon open source libraries - direct KAON2 reasoning no longer possible (KAON2 can still be used over DIG if desired) - most of the other KAON2 functionality rewritten to OWL API, in particular KB to OWL export Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/core/ReasoningService.java trunk/src/dl-learner/org/dllearner/kb/KBFile.java trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java trunk/src/dl-learner/org/dllearner/utilities/CrossValidation.java trunk/src/dl-learner/org/dllearner/utilities/OntologyClassRewriter.java Removed Paths: ------------- trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/lib/components.ini 2008-02-06 16:36:36 UTC (rev 502) @@ -8,7 +8,6 @@ org.dllearner.reasoning.OWLAPIReasoner org.dllearner.reasoning.DIGReasoner org.dllearner.reasoning.FastRetrievalReasoner -org.dllearner.reasoning.KAON2Reasoner # learning problems org.dllearner.learningproblems.PosNegDefinitionLP org.dllearner.learningproblems.PosNegInclusionLP Modified: trunk/src/dl-learner/org/dllearner/core/ReasoningService.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/ReasoningService.java 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/src/dl-learner/org/dllearner/core/ReasoningService.java 2008-02-06 16:36:36 UTC (rev 502) @@ -20,7 +20,6 @@ package org.dllearner.core; -import java.io.File; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -34,8 +33,6 @@ import org.dllearner.core.dl.Individual; import 
org.dllearner.core.dl.RoleHierarchy; import org.dllearner.core.dl.SubsumptionHierarchy; -import org.dllearner.reasoning.DIGReasoner; -import org.dllearner.reasoning.KAON2Reasoner; import org.dllearner.reasoning.ReasonerType; import org.dllearner.utilities.SortedSetTuple; @@ -400,17 +397,6 @@ return result; } - // speichern einer Ontolgie wird speziell behandelt, da kein Reasoning - public void saveOntology(File file, OntologyFormat format) { - if (getReasonerType() == ReasonerType.KAON2) { - ((KAON2Reasoner) reasoner).saveOntology(file, format); - } else if (getReasonerType() == ReasonerType.DIG) { - // DIG erzeugt momentan auch nur einen KAON2-Reasoner und - // exportiert dann mit der obigen Funktion - ((DIGReasoner) reasoner).saveOntology(file, format); - } - } - public Set<AtomicConcept> getAtomicConcepts() { return reasoner.getAtomicConcepts(); } Modified: trunk/src/dl-learner/org/dllearner/kb/KBFile.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/KBFile.java 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/src/dl-learner/org/dllearner/kb/KBFile.java 2008-02-06 16:36:36 UTC (rev 502) @@ -36,12 +36,20 @@ import org.dllearner.parser.KBParser; import org.dllearner.parser.ParseException; import org.dllearner.reasoning.DIGConverter; -import org.dllearner.reasoning.KAON2Reasoner; -import org.semanticweb.kaon2.api.KAON2Exception; -import org.semanticweb.kaon2.api.formatting.OntologyFileFormat; -import org.semanticweb.kaon2.api.reasoner.Reasoner; +import org.dllearner.reasoning.OWLAPIReasoner; +import org.semanticweb.owl.apibinding.OWLManager; +import org.semanticweb.owl.model.OWLOntology; +import org.semanticweb.owl.model.OWLOntologyCreationException; +import org.semanticweb.owl.model.OWLOntologyManager; +import org.semanticweb.owl.model.OWLOntologyStorageException; +import org.semanticweb.owl.model.UnknownOWLOntologyException; +import org.semanticweb.owl.util.SimpleURIMapper; /** + * KB files are an 
internal convenience format used in DL-Learner. Their + * syntax is close to Description Logics and easy to use. KB files can be + * exported to OWL for usage outside of DL-Learner. + * * @author Jens Lehmann * */ @@ -118,25 +126,47 @@ @Override public void export(File file, org.dllearner.core.OntologyFormat format){ - Reasoner kaon2Reasoner = KAON2Reasoner.getKAON2Reasoner(kb); - - String kaon2Format = null; - if(format.equals(org.dllearner.core.OntologyFormat.RDF_XML)) - kaon2Format = OntologyFileFormat.OWL_RDF; - else { - System.err.println("Warning: Cannot export format " + format + ". Exiting."); - System.exit(0); - } - + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + URI ontologyURI = URI.create("http://example.com"); + URI physicalURI = file.toURI(); + SimpleURIMapper mapper = new SimpleURIMapper(ontologyURI, physicalURI); + manager.addURIMapper(mapper); + OWLOntology ontology; try { - kaon2Reasoner.getOntology().saveOntology(kaon2Format,file,"ISO-8859-1"); - } catch (KAON2Exception e) { + ontology = manager.createOntology(ontologyURI); + OWLAPIReasoner.fillOWLAPIOntology(manager,ontology,kb); + manager.saveOntology(ontology); + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block e.printStackTrace(); - } catch (IOException e) { + } catch (UnknownOWLOntologyException e) { + // TODO Auto-generated catch block e.printStackTrace(); - } catch (InterruptedException e) { + } catch (OWLOntologyStorageException e) { + // TODO Auto-generated catch block e.printStackTrace(); - } + } + + +// Reasoner kaon2Reasoner = KAON2Reasoner.getKAON2Reasoner(kb); +// +// String kaon2Format = null; +// if(format.equals(org.dllearner.core.OntologyFormat.RDF_XML)) +// kaon2Format = OntologyFileFormat.OWL_RDF; +// else { +// System.err.println("Warning: Cannot export format " + format + ". 
Exiting."); +// System.exit(0); +// } +// +// try { +// kaon2Reasoner.getOntology().saveOntology(kaon2Format,file,"ISO-8859-1"); +// } catch (KAON2Exception e) { +// e.printStackTrace(); +// } catch (IOException e) { +// e.printStackTrace(); +// } catch (InterruptedException e) { +// e.printStackTrace(); +// } } public URL getURL() { Deleted: trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/src/dl-learner/org/dllearner/reasoning/KAON2Reasoner.java 2008-02-06 16:36:36 UTC (rev 502) @@ -1,859 +0,0 @@ -package org.dllearner.reasoning; - -import java.io.File; -import java.io.IOException; -import java.net.URL; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeMap; -import java.util.TreeSet; - -import org.dllearner.core.ReasonerComponent; -import org.dllearner.core.config.CommonConfigOptions; -import org.dllearner.core.config.ConfigEntry; -import org.dllearner.core.config.ConfigOption; -import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.dl.All; -import org.dllearner.core.dl.AssertionalAxiom; -import org.dllearner.core.dl.AtomicConcept; -import org.dllearner.core.dl.AtomicRole; -import org.dllearner.core.dl.Bottom; -import org.dllearner.core.dl.Concept; -import org.dllearner.core.dl.ConceptAssertion; -import org.dllearner.core.dl.Conjunction; -import org.dllearner.core.dl.Disjunction; -import org.dllearner.core.dl.Equality; -import org.dllearner.core.dl.Exists; -import org.dllearner.core.dl.FunctionalRoleAxiom; -import org.dllearner.core.dl.Inclusion; -import org.dllearner.core.dl.Individual; -import 
org.dllearner.core.dl.InverseRoleAxiom; -import org.dllearner.core.dl.KB; -import org.dllearner.core.dl.MultiConjunction; -import org.dllearner.core.dl.MultiDisjunction; -import org.dllearner.core.dl.Negation; -import org.dllearner.core.dl.RBoxAxiom; -import org.dllearner.core.dl.RoleAssertion; -import org.dllearner.core.dl.SubRoleAxiom; -import org.dllearner.core.dl.SymmetricRoleAxiom; -import org.dllearner.core.dl.TerminologicalAxiom; -import org.dllearner.core.dl.Top; -import org.dllearner.core.dl.TransitiveRoleAxiom; -import org.dllearner.utilities.ConceptComparator; -import org.dllearner.utilities.Helper; -import org.semanticweb.kaon2.api.DefaultOntologyResolver; -import org.semanticweb.kaon2.api.KAON2Connection; -import org.semanticweb.kaon2.api.KAON2Exception; -import org.semanticweb.kaon2.api.KAON2Manager; -import org.semanticweb.kaon2.api.Ontology; -import org.semanticweb.kaon2.api.Request; -import org.semanticweb.kaon2.api.formatting.OntologyFileFormat; -import org.semanticweb.kaon2.api.logic.Formula; -import org.semanticweb.kaon2.api.logic.Literal; -import org.semanticweb.kaon2.api.logic.QueryDefinition; -import org.semanticweb.kaon2.api.logic.Variable; -import org.semanticweb.kaon2.api.owl.axioms.ObjectPropertyAttribute; -import org.semanticweb.kaon2.api.owl.elements.Description; -import org.semanticweb.kaon2.api.owl.elements.OWLClass; -import org.semanticweb.kaon2.api.owl.elements.ObjectProperty; -import org.semanticweb.kaon2.api.reasoner.Query; -import org.semanticweb.kaon2.api.reasoner.SubsumptionHierarchy; -import org.semanticweb.kaon2.api.reasoner.SubsumptionHierarchy.Node; - -/** - * - * Subsumption-Hierarchie wird automatisch beim ersten Aufruf von getMoreGeneral|SpecialConcept - * berechnet. 
- * - * @author jl - * - */ -public class KAON2Reasoner extends ReasonerComponent { - - // configuration options - private boolean una = false; - - ConceptComparator conceptComparator = new ConceptComparator(); - - Set<AtomicConcept> atomicConcepts; - Set<AtomicRole> atomicRoles; - SortedSet<Individual> individuals; - SubsumptionHierarchy kaon2SubsumptionHierarchy = null; - org.dllearner.core.dl.SubsumptionHierarchy subsumptionHierarchy; - - private org.semanticweb.kaon2.api.reasoner.Reasoner kaon2Reasoner; - private KAON2Connection kaon2Connection; - - public KAON2Reasoner(KB kb, Map<URL,org.dllearner.core.OntologyFormat> imports) { - - if(imports.size()>1) - System.out.println("Warning: KAON2-Reasoner currently supports only one import file. Ignoring all other imports."); - - try { - kaon2Connection = KAON2Manager.newConnection(); - } catch (KAON2Exception e2) { - e2.printStackTrace(); - } - - DefaultOntologyResolver resolver = new DefaultOntologyResolver(); - - // Set<String> ontologyURIs = new HashSet<String>(); - String ontologyURI = ""; - URL importFile = null; - if (!imports.isEmpty()) { - // alter Code - nicht empfehlenswert, da feste URI zugewiesen wird - // resolver.registerReplacement("foo", imports.get(0).toURI().toString()); - - // neuer Code - liest Dateien richtig ein - // es ist aber noch nicht richtig klar, was bei mehreren eingelesenen - // Ontologien passieren soll - // for(File file : imports) { - // String ontologyURI = resolver.registerOntology(file); - // ontologyURIs.add(ontologyURI); - //} - - // eine beliebige Datei auswählen - importFile = imports.keySet().iterator().next(); - - try { - // System.out.println(imports.get(0)); - // resolver.r - - // TODO: testen, ob Umstellung von File auf URL funktioniert! 
- // ontologyURI = resolver.registerOntology(importFile); - ontologyURI = resolver.registerOntology(importFile.toString()); - - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } else { - // falls nur aus Config-Datei gelesen wird, dann wird resolver - // eigentlich nicht gebraucht => trotzdem erwartet KAON2 ein physische - // URI - // resolver.registerReplacement("foo", "file:foo.xml"); - resolver.registerReplacement("http://localhost/foo", "file:nothing.xml"); - } - - kaon2Connection.setOntologyResolver(resolver); - Ontology ontology = null; - - if (!imports.isEmpty()) { - System.out.print("Importing Ontology " + importFile.toString() + " ... "); - ontology = importKB(ontologyURI, imports.get(importFile), kaon2Connection); - } else { - try { - // ontology = connection.createOntology("foo", new - // HashMap<String, Object>()); - ontology = kaon2Connection.createOntology("http://localhost/foo", - new HashMap<String, Object>()); - } catch (KAON2Exception e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } - } - - // System.out.println(importedFile.getPath().toURI().toString()); - // resolver.registerReplacement("foo", "file:" + - // importedFile.getPath()); - // resolver.registerReplacement("foo", - // importedFile.toURI().toString()); - - kaon2Reasoner = getKAON2Reasoner(kb, ontology); - - // Individuals, Concepts und Roles aus Wissensbasis auslesen - Request<ObjectProperty> requestRoles = ontology - .createEntityRequest(ObjectProperty.class); - Request<OWLClass> requestConcepts = ontology.createEntityRequest(OWLClass.class); - Request<org.semanticweb.kaon2.api.owl.elements.Individual> requestIndividuals = ontology - .createEntityRequest(org.semanticweb.kaon2.api.owl.elements.Individual.class); - - atomicConcepts = new HashSet<AtomicConcept>(); - atomicRoles = new HashSet<AtomicRole>(); - individuals = new TreeSet<Individual>(); - - try { - for (ObjectProperty role : requestRoles.get()) { - 
atomicRoles.add(new AtomicRole(role.toString())); - // getRole(role.toString()); - } - - for (OWLClass concept : requestConcepts.get()) { - // Top und Bottom sind bei mir keine atomaren Konzepte, sondern - // werden - // extra behandelt - if (!concept.equals(KAON2Manager.factory().thing()) - && !concept.equals(KAON2Manager.factory().nothing())) - atomicConcepts.add(new AtomicConcept(concept.toString())); - // System.out.println(concept.toString()); - } - for (org.semanticweb.kaon2.api.owl.elements.Individual ind : requestIndividuals.get()) { - // getIndividual(ind.toString()); - individuals.add(new Individual(ind.toString())); - } - - // je nachdem, ob unique names assumption aktiviert ist, muss - // man jetzt noch hinzuf�gen, dass die Individuen verschieden sind - if (una) { - Set<org.semanticweb.kaon2.api.owl.elements.Individual> individualsSet = new HashSet<org.semanticweb.kaon2.api.owl.elements.Individual>(); - for (Individual individual : individuals) - individualsSet.add(KAON2Manager.factory().individual(individual.getName())); - ontology.addAxiom(KAON2Manager.factory().differentIndividuals( - individualsSet)); - } - - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - } - - // wandelt einen Bezeichner in eine URI für die interne KB - // (http://localhost/foo) um; - // die Umwandlungen machen KAON2 noch langsamer, aber sie sind notwendig - // für eine korrekte Ontologie, die wiederum für den Export wichtig ist; - // es wird einfach gesagt, dass alles was nicht mit "http://" beginnt ein - // interner Bezeichner ist; - // eine sauberere Lösung wäre für die internen Sachen auch immer eine URI - // zu fordern bzw. 
eine ähnliche Heuristik wie hier schon beim parsen zu - // verwenden; für DIG ist das allerdings nicht notwendig (basiert auf DLs, - // also benötigt keine URIs) und erfordert relativ umfangreiche Änderungen - // => es wird jetzt doch die saubere Lösung verwendet - /* - private String getInternalURI(String name) { - if(name.startsWith("http://")) - return name; - else - return internalNamespace + name; - } - - // wandelt eine lokal vergebene URI in einen Bezeichner um - private String getNameFromInternalURI(String uri) { - if(uri.startsWith(internalNamespace)) - return uri.substring(internalNamespace.length()); - else - return uri; - } - */ - - public static String getName() { - return "KAON2 reasoner"; - } - - // TODO: hier werden momentan keine allowed concepts berücksichtigt - // (benötigt rekursive Aufrufe, da ein erlaubtes Konzept von einem nicht - // erlaubten verdeckt werden könnte) - public void prepareSubsumptionHierarchy(Set<AtomicConcept> allowedConcepts) { - try { - kaon2SubsumptionHierarchy = kaon2Reasoner.getSubsumptionHierarchy(); - } catch (KAON2Exception e) { - e.printStackTrace(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - - // umwandeln in eine für die Lernalgorithmen verwertbare - // Subsumptionhierarchie - TreeMap<Concept,TreeSet<Concept>> subsumptionHierarchyUp = new TreeMap<Concept,TreeSet<Concept>>(conceptComparator); - TreeMap<Concept,TreeSet<Concept>> subsumptionHierarchyDown = new TreeMap<Concept,TreeSet<Concept>>(conceptComparator); - - Top top = new Top(); - OWLClass kaon2Top = KAON2Manager.factory().thing(); - subsumptionHierarchyDown.put(top, (TreeSet<Concept>) getConceptsFromSubsumptionHierarchyNodes(kaon2SubsumptionHierarchy.getNodeFor(kaon2Top).getChildNodes())); - // subsumptionHierarchyUp.put(top, new TreeSet<Concept>(conceptComparator)); - - Bottom bottom = new Bottom(); - OWLClass kaon2Bottom = KAON2Manager.factory().nothing(); - subsumptionHierarchyUp.put(bottom, (TreeSet<Concept>) 
getConceptsFromSubsumptionHierarchyNodes(kaon2SubsumptionHierarchy.getNodeFor(kaon2Bottom).getParentNodes())); - // subsumptionHierarchyDown.put(bottom, new TreeSet<Concept>(conceptComparator)); - - for(AtomicConcept ac : atomicConcepts) { - OWLClass kaon2Ac = (OWLClass) getKAON2Description(ac); - subsumptionHierarchyDown.put(ac, (TreeSet<Concept>) getConceptsFromSubsumptionHierarchyNodes(kaon2SubsumptionHierarchy.getNodeFor(kaon2Ac).getChildNodes())); - subsumptionHierarchyUp.put(ac, (TreeSet<Concept>) getConceptsFromSubsumptionHierarchyNodes(kaon2SubsumptionHierarchy.getNodeFor(kaon2Ac).getParentNodes())); - } - - subsumptionHierarchy = new org.dllearner.core.dl.SubsumptionHierarchy(atomicConcepts, subsumptionHierarchyUp, subsumptionHierarchyDown); - } - - @Override - public SortedSet<Individual> retrieval(Concept c) { - SortedSet<Individual> result = new TreeSet<Individual>(); - Description d = getKAON2Description(c); - - Query query = null; - try { - query = kaon2Reasoner.createQuery(d); - query.open(); - while (!query.afterLast()) { - // tupleBuffer = query.tupleBuffer(); - String individual = query.tupleBuffer()[0].toString(); - result.add(new Individual(individual)); - query.next(); - } - query.close(); - query.dispose(); - } catch (KAON2Exception e) { - e.printStackTrace(); - System.exit(0); - } catch (InterruptedException e) { - e.printStackTrace(); - System.exit(0); - } - return result; - } - - public boolean instanceCheck(Concept c, String s) { - boolean result; - - Description d = getKAON2Description(c); - // ev. Aufruf Main.getIndividual g�nstiger?? 
- org.semanticweb.kaon2.api.owl.elements.Individual i = KAON2Manager.factory().individual(s); - - // Individual i = Main.getIndividual(s); - // ClassMember cm = KAON2Manager.factory().classMember(d, i); - // Formula f = KAON2Manager.factory().classMember(d,i); - // Constant constant = KAON2Manager.factory().constant(i); - // Predicate predicate = KAON2Manager.factory().p - Literal l = KAON2Manager.factory().literal(true, d, i); - // Formula f = KAON2Manager.factory(). - - // TODO: mal mit Boris abkl�ren wie der Instance-Check - // gemacht werden soll; eine Formel zu erstellen erscheint - // mir ziemlich umst�ndlich und ist mir auch nicht ganz - // klar wie das gemacht wird - - QueryDefinition qd = KAON2Manager.factory().queryDefinition(l, new Variable[] {}); - - // kaon2Reasoner.createQuery(l, new Variable[] { }); - // Query q; - result = true; - try { - Query query = kaon2Reasoner.createQuery(qd); - query.open(); - if (query.afterLast()) - result = false; - query.close(); - query.dispose(); - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - return result; - } - - @Override - public boolean subsumes(Concept superConcept, Concept subConcept) { - Description d1 = getKAON2Description(superConcept); - Description d2 = getKAON2Description(subConcept); - try { - return kaon2Reasoner.subsumedBy(d2, d1); - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - throw new Error("Subsumption Error in KAON2."); - } - - @Override - public boolean isSatisfiable() { - try { - return kaon2Reasoner.isSatisfiable(); - } catch (KAON2Exception e1) { - e1.printStackTrace(); - } catch (InterruptedException e2) { - e2.printStackTrace(); - } - throw new Error("Error in satisfiability check in KAON2."); - } - - 
/* - private SortedSet<Concept> getMoreGeneralConcepts(Concept concept) { - // if (subsumptionHierarchy == null) { - // computeSubsumptionHierarchy(); - // } - - Description d = getKAON2Description(concept); - if(!(d instanceof OWLClass)) { - System.out.println("description: " + d); - System.out.println("concept:" + concept); - } - OWLClass owlClass = (OWLClass) d; - return getConceptsFromSubsumptionHierarchyNodes(kaon2SubsumptionHierarchy.getNodeFor( - owlClass).getParentNodes()); - } - - private SortedSet<Concept> getMoreSpecialConcepts(Concept concept) { - // if (subsumptionHierarchy == null) { - // computeSubsumptionHierarchy(); - // } - - OWLClass owlClass = (OWLClass) getKAON2Description(concept); - return getConceptsFromSubsumptionHierarchyNodes(kaon2SubsumptionHierarchy.getNodeFor( - owlClass).getChildNodes()); - } - */ - - @Override - public org.dllearner.core.dl.SubsumptionHierarchy getSubsumptionHierarchy() { - return subsumptionHierarchy; - } - - private SortedSet<Concept> getConceptsFromSubsumptionHierarchyNodes(Set<Node> nodes) { - SortedSet<Concept> ret = new TreeSet<Concept>(conceptComparator); - for (Node node : nodes) { - // es wird nur das erste Konzept unter mehreren �quivalenten - // beachtet - Iterator<OWLClass> it = node.getOWLClasses().iterator(); - ret.add(getConcept(it.next())); - if (node.getOWLClasses().size() > 1) - System.out - .println("Warning: Ontology contains equivalent classes. Only one" - + "representative of each equivalence class is used for learning. 
The others" - + "are ignored."); - } - return ret; - } - - public Concept getConcept(Description description) { - if (description.equals(KAON2Manager.factory().thing())) { - return new Top(); - } else if (description.equals(KAON2Manager.factory().nothing())) { - return new Bottom(); - } else if (description instanceof OWLClass) { - return new AtomicConcept(description.toString()); - } else { - throw new Error("Transforming complex KAON2 descriptions not supported."); - } - } - - @Override - public Map<org.dllearner.core.dl.Individual, SortedSet<org.dllearner.core.dl.Individual>> getRoleMembers(AtomicRole atomicRole) { - Map<org.dllearner.core.dl.Individual, SortedSet<org.dllearner.core.dl.Individual>> returnMap = new TreeMap<org.dllearner.core.dl.Individual, SortedSet<org.dllearner.core.dl.Individual>>(); - - Query query; - Object[] tupleBuffer; - ObjectProperty role = KAON2Manager.factory().objectProperty(atomicRole.getName()); - // positiver Query - try { - query = kaon2Reasoner.createQuery(role); - - query.open(); - while (!query.afterLast()) { - tupleBuffer = query.tupleBuffer(); - org.dllearner.core.dl.Individual individual1 = new org.dllearner.core.dl.Individual(tupleBuffer[0].toString()); - org.dllearner.core.dl.Individual individual2 = new org.dllearner.core.dl.Individual(tupleBuffer[1].toString()); - // addIndividualToRole(aBox.rolesPos, name, individual1, - // individual2); - Helper.addMapEntry(returnMap, individual1, individual2); - query.next(); - } - query.close(); - query.dispose(); - } catch (KAON2Exception e1) { - // TODO Auto-generated catch block - e1.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - return returnMap; - } - - public void saveOntology(File file, org.dllearner.core.OntologyFormat format) { - // File exportFile = new File(baseDir, fileName); - // String format = OntologyFileFormat.OWL_RDF; - String kaon2Format = null; - 
if(format.equals(org.dllearner.core.OntologyFormat.RDF_XML)) - kaon2Format = OntologyFileFormat.OWL_RDF; - else { - System.err.println("Warning: Cannot export format " + format + ". Exiting."); - System.exit(0); - } - - try { - kaon2Reasoner.getOntology().saveOntology(kaon2Format,file,"ISO-8859-1"); - } catch (KAON2Exception e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - public static Ontology importKB(File importFile) { - - Ontology ontology = null; - try { - System.out.print("Importing " + importFile + " ... "); - long importStartTime = System.currentTimeMillis(); - - // TODO: hier wird Ontologie richtig importiert; dass muss im - // Konstruktor eventuell entsprechend angepasst werden - DefaultOntologyResolver resolver = new DefaultOntologyResolver(); - String ontologyURI = resolver.registerOntology(importFile); - KAON2Connection connection = KAON2Manager.newConnection(); - connection.setOntologyResolver(resolver); - ontology = connection.openOntology(ontologyURI, new HashMap<String,Object>()); - long importDuration = System.currentTimeMillis() - importStartTime; - System.out.println("OK (" + importDuration + " ms) [ontology URI " + ontologyURI + "]"); - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - return ontology; - } - - private static Ontology importKB(String ontologyURI, org.dllearner.core.OntologyFormat format, KAON2Connection connection) { - Ontology ontology = null; - try { - long importStartTime = System.currentTimeMillis(); - ontology = connection.openOntology(ontologyURI, new HashMap<String, Object>()); - long importDuration = System.currentTimeMillis() - importStartTime; - System.out.println("OK (" + importDuration + " ms)"); - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - 
e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - return ontology; - } - - // Umwandlung eines Konzepts in eigener Darstellung zu einem - // KAON2-Konzept (hat die st�ndige Umwandlung eine gro�e Auswirkung - // auf die Effizienz? - es m�ssen zumindest h�ufig neue Konzepte - // erzeugt werden) - public static Description getKAON2Description(Concept concept) { - if (concept instanceof AtomicConcept) { - return KAON2Manager.factory().owlClass(((AtomicConcept) concept).getName()); - } else if (concept instanceof Bottom) { - return KAON2Manager.factory().nothing(); - } else if (concept instanceof Top) { - return KAON2Manager.factory().thing(); - } else if (concept instanceof Negation) { - return KAON2Manager.factory().objectNot( - getKAON2Description(concept.getChild(0))); - } else if (concept instanceof Conjunction) { - Description d1 = getKAON2Description(concept.getChild(0)); - Description d2 = getKAON2Description(concept.getChild(1)); - return KAON2Manager.factory().objectAnd(d1, d2); - } else if (concept instanceof Disjunction) { - Description d1 = getKAON2Description(concept.getChild(0)); - Description d2 = getKAON2Description(concept.getChild(1)); - return KAON2Manager.factory().objectOr(d1, d2); - } else if (concept instanceof All) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((All) concept).getRole().getName()); - Description d = getKAON2Description(concept.getChild(0)); - return KAON2Manager.factory().objectAll(role, d); - } else if(concept instanceof Exists) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((Exists) concept).getRole().getName()); - Description d = getKAON2Description(concept.getChild(0)); - return KAON2Manager.factory().objectSome(role, d); - } else if(concept instanceof MultiConjunction) { - List<Description> descriptions = new LinkedList<Description>(); - for(Concept child : concept.getChildren()) { - 
descriptions.add(getKAON2Description(child)); - } - return KAON2Manager.factory().objectAnd(descriptions); - } else if(concept instanceof MultiDisjunction) { - List<Description> descriptions = new LinkedList<Description>(); - for(Concept child : concept.getChildren()) { - descriptions.add(getKAON2Description(child)); - } - return KAON2Manager.factory().objectOr(descriptions); - } - - throw new IllegalArgumentException("Unsupported concept type."); - } - - public static org.semanticweb.kaon2.api.reasoner.Reasoner getKAON2Reasoner(KB kb) { - try { - KAON2Connection connection = KAON2Manager.newConnection(); - - DefaultOntologyResolver resolver = new DefaultOntologyResolver(); - resolver.registerReplacement("http://localhost/foo", "file:nothing.xml"); - connection.setOntologyResolver(resolver); - Ontology ontology = connection.createOntology("http://localhost/foo", - new HashMap<String, Object>()); - return getKAON2Reasoner(kb, ontology); - - } catch (KAON2Exception e) { - e.printStackTrace(); - return null; - } - } - - private static org.semanticweb.kaon2.api.reasoner.Reasoner getKAON2Reasoner(KB kb, - Ontology ontology) { - - org.semanticweb.kaon2.api.reasoner.Reasoner reasoner = null; - - try { - - for (AssertionalAxiom axiom : kb.getAbox()) { - if (axiom instanceof ConceptAssertion) { - Description d = getKAON2Description(((ConceptAssertion) axiom) - .getConcept()); - // TODO: checken ob unterschiedliche Objekte - // unterschiedliche - // Individuen sind, auch wenn sie den gleichen Namen haben - org.semanticweb.kaon2.api.owl.elements.Individual i = KAON2Manager.factory().individual( - ((ConceptAssertion) axiom).getIndividual().getName()); - ontology.addAxiom(KAON2Manager.factory().classMember(d, i)); - } else if (axiom instanceof RoleAssertion) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((RoleAssertion) axiom).getRole().getName()); - org.semanticweb.kaon2.api.owl.elements.Individual i1 = KAON2Manager.factory().individual( - ((RoleAssertion) 
axiom).getIndividual1().getName()); - org.semanticweb.kaon2.api.owl.elements.Individual i2 = KAON2Manager.factory().individual( - ((RoleAssertion) axiom).getIndividual2().getName()); - // Code zur Unterst�tzung negierter Rollenzusicherungen, - // falls sp�ter ben�tigt - // Literal l = KAON2Manager.factory().literal(true, role, - // i1, i2); - // Rule r = KAON2Manager.factory().rule(new Formula[] {}, - // true, new Formula[] { l }); - // changes.add(new OntologyChangeEvent(r, - // OntologyChangeEvent.ChangeType.ADD)); - ontology.addAxiom(KAON2Manager.factory().objectPropertyMember(role, - i1, i2)); - } - } - - for (RBoxAxiom axiom : kb.getRbox()) { - if (axiom instanceof FunctionalRoleAxiom) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((FunctionalRoleAxiom) axiom).getRole().getName()); - ontology.addAxiom(KAON2Manager.factory().objectPropertyAttribute( - role, ObjectPropertyAttribute.OBJECT_PROPERTY_FUNCTIONAL)); - } else if (axiom instanceof SymmetricRoleAxiom) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((SymmetricRoleAxiom) axiom).getRole().getName()); - ontology.addAxiom(KAON2Manager.factory().objectPropertyAttribute( - role, ObjectPropertyAttribute.OBJECT_PROPERTY_SYMMETRIC)); - - // alternative Implementierung ohne Hilfskonstrukt - //ObjectProperty inverseRole = KAON2Manager.factory().objectProperty( - // ((SymmetricRoleAxiom) axiom).getRole().getName()); - //ontology.addAxiom(KAON2Manager.factory().inverseObjectProperties( - // role, inverseRole)); - //ontology.addAxiom(KAON2Manager.factory().equivalentObjectProperties(role, - // inverseRole)); - } else if (axiom instanceof TransitiveRoleAxiom) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((SymmetricRoleAxiom) axiom).getRole().getName()); - ontology.addAxiom(KAON2Manager.factory().objectPropertyAttribute( - role, ObjectPropertyAttribute.OBJECT_PROPERTY_TRANSITIVE)); - } else if (axiom instanceof InverseRoleAxiom) { - ObjectProperty role = 
KAON2Manager.factory().objectProperty( - ((InverseRoleAxiom) axiom).getRole().getName()); - ObjectProperty inverseRole = KAON2Manager.factory().objectProperty( - ((InverseRoleAxiom) axiom).getInverseRole().getName()); - ontology.addAxiom(KAON2Manager.factory().inverseObjectProperties( - role, inverseRole)); - } else if (axiom instanceof SubRoleAxiom) { - ObjectProperty role = KAON2Manager.factory().objectProperty( - ((SubRoleAxiom) axiom).getRole().getName()); - ObjectProperty subRole = KAON2Manager.factory().objectProperty( - ((SubRoleAxiom) axiom).getSubRole().getName()); - ontology.addAxiom(KAON2Manager.factory().subObjectPropertyOf(subRole, - role)); - } - } - - for (TerminologicalAxiom axiom : kb.getTbox()) { - if (axiom instanceof Equality) { - Description d1 = getKAON2Description(((Equality) axiom).getConcept1()); - Description d2 = getKAON2Description(((Equality) axiom).getConcept2()); - ontology.addAxiom(KAON2Manager.factory().equivalentClasses(d1, d2)); - } else if (axiom instanceof Inclusion) { - Description subConcept = getKAON2Description(((Inclusion) axiom) - .getSubConcept()); - Description superConcept = getKAON2Description(((Inclusion) axiom) - .getSuperConcept()); - ontology.addAxiom(KAON2Manager.factory().subClassOf(subConcept, - superConcept)); - } - } - - // ontology.applyChanges(changes); - reasoner = ontology.createReasoner(); - } catch (KAON2Exception e) { - e.printStackTrace(); - System.out.println(e); - } - - return reasoner; - } - - public Map<String, SortedSet<String>> getNegatedRoleMembers(AtomicRole atomicRole) { - Map<String, SortedSet<String>> returnMap = new TreeMap<String, SortedSet<String>>(); - - Query query; - Object[] tupleBuffer; - ObjectProperty role = KAON2Manager.factory().objectProperty(atomicRole.getName()); - // negativer Query - // wird �ber Rule-ML gemacht - Variable X = KAON2Manager.factory().variable("X"); - Variable Y = KAON2Manager.factory().variable("Y"); - // ObjectProperty role = roles.get(name); - Literal l = 
KAON2Manager.factory().literal(true, role, X, Y); - Formula f = null; - - // falls closed world assumption, dann reicht default negation -// if (Config.owa) - // wegen BUG IN KAON2 momentan auskommentiert - // f = KAON2Manager.factory().classicalNegation(l); -// ; -// else - f = KAON2Manager.factory().defaultNegation(l); - - // if-Teil entf�llt, sobald Bug in KAON2 gefixt wurde -// if (!Config.owa) { - // ClassicalNegation cn = - // KAON2Manager.factory().classicalNegation(l); - try { - query = kaon2Reasoner.createQuery(f, new Variable[] { X, Y }, null, null); - - // BUG IN KAON2, DESWEGEN AUSKOMMENTIERT - - // System.out.println(); - query.open(); - while (!query.afterLast()) { - tupleBuffer = query.tupleBuffer(); - String individual1 = tupleBuffer[0].toString(); - String individual2 = tupleBuffer[1].toString(); - // addIndividualToRole(aBox.rolesNeg, name, - // individual1,individual2); - System.out.println(atomicRole.getName() + " " + individual1 + " " - + individual2); - query.next(); - } - query.close(); - query.dispose(); - } catch (KAON2Exception e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } -// } - - return returnMap; - } - - public void terminateReasoner() { - kaon2Reasoner.dispose(); - try { - kaon2Connection.close(); - } catch (KAON2Exception e) { - e.printStackTrace(); - } - } - - public ReasonerType getReasonerType() { - return ReasonerType.KAON2; - } - - public Set<AtomicConcept> getAtomicConcepts() { - return atomicConcepts; - } - - public Set<AtomicRole> getAtomicRoles() { - return atomicRoles; - } - - public SortedSet<Individual> getIndividuals() { - return individuals; - } - - public static Collection<ConfigOption<?>> createConfigOptions() { - Collection<ConfigOption<?>> options = new LinkedList<ConfigOption<?>>(); - options.add(CommonConfigOptions.getUNA()); - return options; - } - - /* (non-Javadoc) - * @see 
org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.ConfigEntry) - */ - @Override - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - String name = entry.getOptionName(); - if(name.equals("una")) - una = (Boolean) entry.getValue(); - } - - /* (non-Javadoc) - * @see org.dllearner.core.Component#init() - */ - @Override - public void init() { - // TODO Auto-generated method stub - - } - - // Problem: mit den eigenen Datenstrukturen wird OWL nicht vollständig - // abgedeckt z.B. data types, d.h. ohne Erweiterung der internen Strukturen - // kann nicht jede KAON2-Ontologie importiert werden - // TODO: unvollständig - /* - public static void importKAON2Ontology(KB kb, Ontology ontology) throws KAON2Exception { - Set<Axiom> axioms = ontology.createAxiomRequest().getAll(); - - // KB kb = new KB(); - for(Axiom axiom : axioms) { - if(axiom instanceof ClassMember) { - String individual = ((ClassMember)axiom).getIndividual().toString(); - Concept concept = importKAON2Concept(((ClassMember)axiom).getDescription()); - ConceptAssertion ca = new ConceptAssertion(concept, individual); - kb.addABoxAxiom(ca); - // da es eine externe API ist, können wir nicht sicher sein alle - // Axiome erwischt zu haben - } else { - throw new RuntimeException("Failed to import the following axiom: " + axiom); - } - } - } - */ - - // TODO: unvollständig - /* - private static Concept importKAON2Concept(Description description) { - if (description.equals(KAON2Manager.factory().thing())) { - return new Top(); - } else if (description.equals(KAON2Manager.factory().nothing())) { - return new Bottom(); - } else if (description instanceof OWLClass) { - return new AtomicConcept(description.toString()); - } else { - throw new RuntimeException("Failed to convert the following KAON2 description: " + description); - } - } - */ - - -} Modified: trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 2008-02-06 16:36:36 UTC (rev 502) @@ -38,26 +38,41 @@ import org.dllearner.core.config.InvalidConfigOptionValueException; import org.dllearner.core.config.StringConfigOption; import org.dllearner.core.dl.All; +import org.dllearner.core.dl.AssertionalAxiom; import org.dllearner.core.dl.AtomicConcept; import org.dllearner.core.dl.AtomicRole; import org.dllearner.core.dl.Bottom; import org.dllearner.core.dl.Concept; +import org.dllearner.core.dl.ConceptAssertion; import org.dllearner.core.dl.Conjunction; import org.dllearner.core.dl.Disjunction; +import org.dllearner.core.dl.Equality; import org.dllearner.core.dl.Exists; +import org.dllearner.core.dl.FunctionalRoleAxiom; +import org.dllearner.core.dl.Inclusion; import org.dllearner.core.dl.Individual; +import org.dllearner.core.dl.InverseRoleAxiom; +import org.dllearner.core.dl.KB; import org.dllearner.core.dl.MultiConjunction; import org.dllearner.core.dl.MultiDisjunction; import org.dllearner.core.dl.Negation; +import org.dllearner.core.dl.RBoxAxiom; +import org.dllearner.core.dl.RoleAssertion; import org.dllearner.core.dl.RoleHierarchy; +import org.dllearner.core.dl.SubRoleAxiom; import org.dllearner.core.dl.SubsumptionHierarchy; +import org.dllearner.core.dl.SymmetricRoleAxiom; +import org.dllearner.core.dl.TerminologicalAxiom; import org.dllearner.core.dl.Top; +import org.dllearner.core.dl.TransitiveRoleAxiom; import org.dllearner.kb.OWLFile; import org.dllearner.utilities.ConceptComparator; import org.dllearner.utilities.RoleComparator; import org.semanticweb.owl.apibinding.OWLManager; import org.semanticweb.owl.inference.OWLReasoner; import org.semanticweb.owl.inference.OWLReasonerException; +import org.semanticweb.owl.model.AddAxiom; +import 
org.semanticweb.owl.model.OWLAxiom; import org.semanticweb.owl.model.OWLClass; import org.semanticweb.owl.model.OWLDataFactory; import org.semanticweb.owl.model.OWLDescription; @@ -65,6 +80,7 @@ import org.semanticweb.owl.model.OWLNamedObject; import org.semanticweb.owl.model.OWLObjectProperty; import org.semanticweb.owl.model.OWLOntology; +import org.semanticweb.owl.model.OWLOntologyChangeException; import org.semanticweb.owl.model.OWLOntologyCreationException; import org.semanticweb.owl.model.OWLOntologyManager; @@ -85,6 +101,8 @@ private OWLReasoner reasoner; // the data factory is used to generate OWL API objects private OWLDataFactory factory; + // static factory + private static OWLDataFactory staticFactory = OWLManager.createOWLOntologyManager().getOWLDataFactory(); private ConceptComparator conceptComparator = new ConceptComparator(); private RoleComparator roleComparator = new RoleComparator(); @@ -503,19 +521,19 @@ return new AtomicConcept(owlClass.getURI().toString()); } - public OWLObjectProperty getOWLAPIDescription(AtomicRole role) { - return factory.getOWLObjectProperty(URI.create(role.getName())); + public static OWLObjectProperty getOWLAPIDescription(AtomicRole role) { + return staticFactory.getOWLObjectProperty(URI.create(role.getName())); } - public OWLDescription getOWLAPIDescription(Concept concept) { + public static OWLDescription getOWLAPIDescription(Concept concept) { if (concept instanceof AtomicConcept) { - return factory.getOWLClass(URI.create(((AtomicConcept)concept).getName())); + return staticFactory.getOWLClass(URI.create(((AtomicConcept)concept).getName())); } else if (concept instanceof Bottom) { - return factory.getOWLNothing(); + return staticFactory.getOWLNothing(); } else if (concept instanceof Top) { - return factory.getOWLThing(); + return staticFactory.getOWLThing(); } else if (concept instanceof Negation) { - return factory.getOWLObjectComplementOf( + return staticFactory.getOWLObjectComplementOf( 
getOWLAPIDescription(concept.getChild(0))); } else if (concept instanceof Conjunction) { OWLDescription d1 = getOWLAPIDescription(concept.getChild(0)); @@ -523,39 +541,133 @@ Set<OWLDescription> d = new HashSet<OWLDescription>(); d.add(d1); d.add(d2); - return factory.getOWLObjectIntersectionOf(d); + return staticFactory.getOWLObjectIntersectionOf(d); } else if (concept instanceof Disjunction) { OWLDescription d1 = getOWLAPIDescription(concept.getChild(0)); OWLDescription d2 = getOWLAPIDescription(concept.getChild(1)); Set<OWLDescription> d = new HashSet<OWLDescription>(); d.add(d1); d.add(d2); - return factory.getOWLObjectUnionOf(d); + return staticFactory.getOWLObjectUnionOf(d); } else if (concept instanceof All) { - OWLObjectProperty role = factory.getOWLObjectProperty( + OWLObjectProperty role = staticFactory.getOWLObjectProperty( URI.create(((All) concept).getRole().getName())); OWLDescription d = getOWLAPIDescription(concept.getChild(0)); - return factory.getOWLObjectAllRestriction(role, d); + return staticFactory.getOWLObjectAllRestriction(role, d); } else if(concept instanceof Exists) { - OWLObjectProperty role = factory.getOWLObjectProperty( + OWLObjectProperty role = staticFactory.getOWLObjectProperty( URI.create(((Exists) concept).getRole().getName())); OWLDescription d = getOWLAPIDescription(concept.getChild(0)); - return factory.getOWLObjectSomeRestriction(role, d); + return staticFactory.getOWLObjectSomeRestriction(role, d); } else if(concept instanceof MultiConjunction) { Set<OWLDescription> descriptions = new HashSet<OWLDescription>(); for(Concept child : concept.getChildren()) descriptions.add(getOWLAPIDescription(child)); - return factory.getOWLObjectIntersectionOf(descriptions); + return staticFactory.getOWLObjectIntersectionOf(descriptions); } else if(concept instanceof MultiDisjunction) { Set<OWLDescription> descriptions = new HashSet<OWLDescription>(); for(Concept child : concept.getChildren()) descriptions.add(getOWLAPIDescription(child)); - 
return factory.getOWLObjectUnionOf(descriptions); + return staticFactory.getOWLObjectUnionOf(descriptions); } throw new IllegalArgumentException("Unsupported concept type."); } + public static void fillOWLAPIOntology(OWLOntologyManager manager, OWLOntology ontology, KB kb) { + + // OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + // OWLOntology ontology = manager.createOntology(ontologyURI); + try { + for (AssertionalAxiom axiom : kb.getAbox()) { + if (axiom instanceof ConceptAssertion) { + OWLDescription d = getOWLAPIDescription(((ConceptAssertion) axiom) + .getConcept()); + OWLIndividual i = factory.getOWLIndividual(URI.create( + ((ConceptAssertion) axiom).getIndividual().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLClassAssertionAxiom(i, d); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + + manager.applyChange(addAxiom); + + } else if (axiom instanceof RoleAssertion) { + OWLObjectProperty role = factory.getOWLObjectProperty( + URI.create(((RoleAssertion) axiom).getRole().getName())); + OWLIndividual i1 = factory.getOWLIndividual( + URI.create(((RoleAssertion) axiom).getIndividual1().getName())); + OWLIndividual i2 = factory.getOWLIndividual( + URI.create(((RoleAssertion) axiom).getIndividual2().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLObjectPropertyAssertionAxiom(i1, role, i2); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } + } + + for (RBoxAxiom axiom : kb.getRbox()) { + if (axiom instanceof FunctionalRoleAxiom) { + OWLObjectProperty role = factory.getOWLObjectProperty( + URI.create(((FunctionalRoleAxiom) axiom).getRole().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLFunctionalObjectPropertyAxiom(role); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } else if (axiom instanceof SymmetricRoleAxiom) { + OWLObjectProperty role = factory.getOWLObjectProperty( 
+ URI.create(((SymmetricRoleAxiom) axiom).getRole().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLSymmetricObjectPropertyAxiom(role); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } else if (axiom instanceof TransitiveRoleAxiom) { + OWLObjectProperty role = factory.getOWLObjectProperty( + URI.create(((SymmetricRoleAxiom) axiom).getRole().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLTransitiveObjectPropertyAxiom(role); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } else if (axiom instanceof InverseRoleAxiom) { + OWLObjectProperty role = factory.getOWLObjectProperty( + URI.create(((InverseRoleAxiom) axiom).getRole().getName())); + OWLObjectProperty inverseRole = factory.getOWLObjectProperty( + URI.create(((InverseRoleAxiom) axiom).getInverseRole().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLInverseObjectPropertiesAxiom(role, inverseRole); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } else if (axiom instanceof SubRoleAxiom) { + OWLObjectProperty role = factory.getOWLObjectProperty( + URI.create(((SubRoleAxiom) axiom).getRole().getName())); + OWLObjectProperty subRole = factory.getOWLObjectProperty( + URI.create(((SubRoleAxiom) axiom).getSubRole().getName())); + OWLAxiom axiomOWLAPI = factory.getOWLSubObjectPropertyAxiom(subRole, role); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } + } + + for (TerminologicalAxiom axiom : kb.getTbox()) { + if (axiom instanceof Equality) { + OWLDescription d1 = getOWLAPIDescription(((Equality) axiom).getConcept1()); + OWLDescription d2 = getOWLAPIDescription(((Equality) axiom).getConcept2()); + Set<OWLDescription> ds = new HashSet<OWLDescription>(); + ds.add(d1); + ds.add(d2); + OWLAxiom axiomOWLAPI = factory.getOWLEquivalentClassesAxiom(ds); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + 
manager.applyChange(addAxiom); + } else if (axiom instanceof Inclusion) { + OWLDescription subConcept = getOWLAPIDescription(((Inclusion) axiom) + .getSubConcept()); + OWLDescription superConcept = getOWLAPIDescription(((Inclusion) axiom) + .getSuperConcept()); + OWLAxiom axiomOWLAPI = factory.getOWLSubClassAxiom(subConcept, superConcept); + AddAxiom addAxiom = new AddAxiom(ontology, axiomOWLAPI); + manager.applyChange(addAxiom); + } + } + } catch (OWLOntologyChangeException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + /** * Test * Modified: trunk/src/dl-learner/org/dllearner/utilities/CrossValidation.java =================================================================== --- trunk/src/dl-learner/org/dllearner/utilities/CrossValidation.java 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/src/dl-learner/org/dllearner/utilities/CrossValidation.java 2008-02-06 16:36:36 UTC (rev 502) @@ -25,7 +25,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Set; -import java.util.TreeSet; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; Modified: trunk/src/dl-learner/org/dllearner/utilities/OntologyClassRewriter.java =================================================================== --- trunk/src/dl-learner/org/dllearner/utilities/OntologyClassRewriter.java 2008-02-06 15:24:06 UTC (rev 501) +++ trunk/src/dl-learner/org/dllearner/utilities/OntologyClassRewriter.java 2008-02-06 16:36:36 UTC (rev 502) @@ -1,27 +1,51 @@ +/** + * Copyright (C) 2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. 
+ * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ + package org.dllearner.utilities; -import java.io.ByteArrayOutputStream; -import java.io.IOException; import java.net.MalformedURLException; +import java.net.URI; import java.net.URL; -import java.util.HashMap; -import java.util.Set; import org.dllearner.core.dl.Concept; import org.dllearner.parser.KBParser; import org.dllearner.parser.ParseException; -import org.dllearner.reasoning.KAON2Reasoner; -import org.semanticweb.kaon2.api.DefaultOntologyResolver; -import org.semanticweb.kaon2.api.KAON2Connection; -import org.semanticweb.kaon2.api.KAON2Exception; -import org.semanticweb.kaon2.api.KAON2Manager; -import org.semanticweb.kaon2.api.Ontology; -import org.semanticweb.kaon2.api.Request; -import org.semanticweb.kaon2.api.formatting.OntologyFileFormat; -import org.semanticweb.kaon2.api.owl.axioms.EquivalentClasses; -import org.semanticweb.kaon2.api.owl.elements.Description; -import org.semanticweb.kaon2.api.owl.elements.OWLClass; +import org.dllearner.reasoning.OWLAPIReasoner; +import org.semanticweb.owl.apibinding.OWLManager; +import org.semanticweb.owl.model.OWLClass; +import org.semanticweb.owl.model.OWLDataFactory; +import org.semanticweb.owl.model.OWLDescription; +import org.semanticweb.owl.model.OWLOntology; +import org.semanticweb.owl.model.OWLOntologyCreationException; +import org.semanticweb.owl.model.OWLOntologyManager; +/** + * Utility class to replace a definition in an OWL file by a learned + * definition. + * + * TODO: Class is currently not working. There is still some KAON2 specific + * code (commented out), which has to be converted to OWL API code. 
+ * + * @author Jens Lehmann + * + */ public class OntologyClassRewriter { public static void main(String[] args) { @@ -37,6 +61,7 @@ System.out.println(rewrittenOntology); } + @SuppressWarnings({"unused"}) public static String rewriteOntology(String urlString, String className, String newConceptString) { try { @@ -45,44 +70,54 @@ Concept newConceptInternal = KBParser.parseConcept(newConceptString); // umwandeln in interne KAON2-Darstellung (bereits im DL-Learner implementiert) - Description newConceptKAON2 = KAON2Reasoner.getKAON2Description(newConceptInternal); + // Description newConceptKAON2 = KAON2Reasoner.getKAON2Description(newConceptInternal); + OWLDescription newConceptOWLAPI = OWLAPIReasoner.getOWLAPIDescription(newConceptInternal); // Umwandlung Klassenname in atomate KAON2-Klasse - OWLClass classKAON2 = KAON2Manager.factory().owlClass(className); + // OWLClass classKAON2 = KAON2Manager.factory().owlClass(className); + OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); + OWLDataFactory factory = manager.getOWLDataFactory(); + OWLClass classOWLAPI = factory.getOWLClass(URI.create(className)); // Test, ob es eine richtige URL ist (ansonsten wird Exception geworfen) new URL(urlString); // einlesen der Ontologie - DefaultOntologyResolver resolver = new DefaultOntologyResolver(); - KAON2Connection connection = KAON2Manager.newConnection(); - connection.setOntologyResolver(resolver); - Ontology ontology = connection.openOntology(urlString, new HashMap<String,Object>()); + // DefaultOntologyResolver resolver = new DefaultOntologyResolver(); + // KAON2Connection connection = KAON2Manager.newConnection(); + // connection.setOntologyResolver(resolver); + // Ontology ontology = connection.openOntology(urlString, new HashMap<String,Object>()); - // suchen von Äquivalenzaxiomen - Request<EquivalentClasses> equivalenceAxiomsRequest = ontology.createAxiomRequest(EquivalentClasses.class); - Set<EquivalentClasses> equivalenceAxioms = 
equivalenceAxiomsRequest.get(); + OWLOntology ontology = manager.loadOntologyFromPhysicalURI(URI.create(urlString)); - for(EquivalentClasses eq : equivalenceAxioms) { - Set<Description> eqDescriptions = eq.getDescriptions(); - if(eqDescriptions.size() != 2) - System.out.println("Warning: Rewriting more than two equivalent descriptions not supported yet." + - " Possibly incorrect ontology returned."); - - // entfernen aller Äquivalenzaxiome, die die Klasse enthalten - if(eqDescriptions.contains(classKAON2)) - ontology.removeAxiom(eq); - } + // TODO - // hinzufügen des neuen Äquivalenzaxioms - EquivalentClasses eqNew = KAON2Manager.factory().equivalentClasses(classKAON2, newConceptKAON2); - ontology.addAxiom(eqNew); + // suchen von Äquivalenzaxiomen +// Request<EquivalentClasses> equivalenceAxiomsRequest = ontology.createAxiomRequest(EquivalentClasses.class); +// Set<EquivalentClasses> equivalenceAxioms = equivalenceAxiomsRequest.get(); +// +// for(EquivalentClasses eq : equivalenceAxioms) { +// Set<Description> eqDescriptions = eq.getDescriptions(); +// if(eqDescriptions.size() != 2) +// System.out.println("Warning: Rewriting more than two equivalent descriptions not supported yet." 
+ +// " Possibly incorrect ontology returned."); +// +// // entfernen aller Äquivalenzaxiome, die die Klasse enthalten +// if(eqDescriptions.contains(classKAON2)) +// ontology.removeAxiom(eq); +// } +// +// // hinzufügen des neuen Äquivalenzaxioms +// EquivalentClasses eqNew = KAON2Manager.factory().equivalentClasses(classKAON2, newConceptKAON2); +// ontology.addAxiom(eqNew); +// +// // umwandeln der Ontologie in einen String +// ByteArrayOutputStream os = new ByteArrayOutputStream(); +// ontology.saveOntology(OntologyFileFormat.OWL_RDF,os,"ISO-8859-1"); +// +// return os.toString(); + return ""; - // umwandeln der Ontologie in einen String - ByteArrayOutputStream os = new ByteArrayOutputStream(); - ontology.saveOntology(OntologyFileFormat.OWL_RDF,os,"ISO-8859-1"); - return os.toString(); - // in einigen der folgenden Fälle sollten im Web-Service Exceptions geworfen // werden (throws ...) z.B. bei ParseException } catch (ParseException e) { @@ -92,16 +127,10 @@ // TODO Auto-generated catch block e.printStackTrace(); System.out.println("Syntactically incorrect URL."); - } catch (KAON2Exception e) { + } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } catch (IOException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } + } throw new Error("Ontology could not be rewritten. Exiting."); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-06 17:04:08
|
Revision: 504 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=504&view=rev Author: jenslehmann Date: 2008-02-06 09:04:01 -0800 (Wed, 06 Feb 2008) Log Message: ----------- updated startup files and classpath entries Modified Paths: -------------- trunk/bin/dllearner trunk/bin/dllearner.bat trunk/bin/quickstart trunk/bin/quickstart.bat trunk/bin/ws trunk/bin/ws.bat trunk/doc/eclipse/classpath.dist trunk/src/dl-learner/org/dllearner/Info.java Modified: trunk/bin/dllearner =================================================================== --- trunk/bin/dllearner 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/bin/dllearner 2008-02-06 17:04:01 UTC (rev 504) @@ -1 +1 @@ -java -cp .:./lib/dig1.1-xmlbeans.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/commons-logging.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/xercesImpl.jar:./lib/kaon2.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.cli.start $@ \ No newline at end of file +java -cp 
.:./lib/dig1.1-xmlbeans.jar:./lib/fact/FaCT++OWLAPI-v1.1.10+.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/arq.jar:./lib/jena/commons-logging-1.1.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/json.jar:./lib/jena/xercesImpl.jar:./lib/junit-4.4.jar:./lib/log4j.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/pellet/aterm-java-1.6.jar:./lib/pellet/pellet.jar:./lib/pellet/relaxngDatatype.jar:./lib/pellet/xsdlib.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.cli.start $@ \ No newline at end of file Modified: trunk/bin/dllearner.bat =================================================================== --- trunk/bin/dllearner.bat 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/bin/dllearner.bat 2008-02-06 17:04:01 UTC (rev 504) @@ -1 +1 @@ -java -cp 
.;.\lib\dig1.1-xmlbeans.jar;.\lib\jena\antlr-2.7.5.jar;.\lib\jena\commons-logging.jar;.\lib\jena\concurrent.jar;.\lib\jena\icu4j_3_4.jar;.\lib\jena\iri.jar;.\lib\jena\jena.jar;.\lib\jena\xercesImpl.jar;.\lib\kaon2.jar;.\lib\owlapi\antlr-runtime-3.0.jar;.\lib\owlapi\commons-lang-2.2.jar;.\lib\owlapi\owlapi-api.jar;.\lib\owlapi\owlapi-apibinding.jar;.\lib\owlapi\owlapi-change.jar;.\lib\owlapi\owlapi-debugging.jar;.\lib\owlapi\owlapi-dig1_1.jar;.\lib\owlapi\owlapi-functionalparser.jar;.\lib\owlapi\owlapi-functionalrenderer.jar;.\lib\owlapi\owlapi-impl.jar;.\lib\owlapi\owlapi-krssparser.jar;.\lib\owlapi\owlapi-mansyntaxparser.jar;.\lib\owlapi\owlapi-mansyntaxrenderer.jar;.\lib\owlapi\owlapi-metrics.jar;.\lib\owlapi\owlapi-oboparser.jar;.\lib\owlapi\owlapi-owlxmlparser.jar;.\lib\owlapi\owlapi-owlxmlrenderer.jar;.\lib\owlapi\owlapi-rdfapi.jar;.\lib\owlapi\owlapi-rdfxmlparser.jar;.\lib\owlapi\owlapi-rdfxmlrenderer.jar;.\lib\owlapi\owlapi-util.jar;.\lib\xbean.jar;.\lib\dllearner.jar org.dllearner.cli.Start %* \ No newline at end of file +java -cp 
.;.\lib\dig1.1-xmlbeans.jar;.\lib\fact\FaCT++OWLAPI-v1.1.10+.jar;.\lib\jena\antlr-2.7.5.jar;.\lib\jena\arq.jar;.\lib\jena\commons-logging-1.1.jar;.\lib\jena\concurrent.jar;.\lib\jena\icu4j_3_4.jar;.\lib\jena\iri.jar;.\lib\jena\jena.jar;.\lib\jena\json.jar;.\lib\jena\xercesImpl.jar;.\lib\junit-4.4.jar;.\lib\log4j.jar;.\lib\owlapi\antlr-runtime-3.0.jar;.\lib\owlapi\commons-lang-2.2.jar;.\lib\owlapi\owlapi-api.jar;.\lib\owlapi\owlapi-apibinding.jar;.\lib\owlapi\owlapi-change.jar;.\lib\owlapi\owlapi-debugging.jar;.\lib\owlapi\owlapi-dig1_1.jar;.\lib\owlapi\owlapi-functionalparser.jar;.\lib\owlapi\owlapi-functionalrenderer.jar;.\lib\owlapi\owlapi-impl.jar;.\lib\owlapi\owlapi-krssparser.jar;.\lib\owlapi\owlapi-mansyntaxparser.jar;.\lib\owlapi\owlapi-mansyntaxrenderer.jar;.\lib\owlapi\owlapi-metrics.jar;.\lib\owlapi\owlapi-oboparser.jar;.\lib\owlapi\owlapi-owlxmlparser.jar;.\lib\owlapi\owlapi-owlxmlrenderer.jar;.\lib\owlapi\owlapi-rdfapi.jar;.\lib\owlapi\owlapi-rdfxmlparser.jar;.\lib\owlapi\owlapi-rdfxmlrenderer.jar;.\lib\owlapi\owlapi-util.jar;.\lib\pellet\aterm-java-1.6.jar;.\lib\pellet\pellet.jar;.\lib\pellet\relaxngDatatype.jar;.\lib\pellet\xsdlib.jar;.\lib\xbean.jar;.\lib\dllearner.jar org.dllearner.cli.Start %* \ No newline at end of file Modified: trunk/bin/quickstart =================================================================== --- trunk/bin/quickstart 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/bin/quickstart 2008-02-06 17:04:01 UTC (rev 504) @@ -1 +1 @@ -java -cp 
.:./lib/dig1.1-xmlbeans.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/commons-logging.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/xercesImpl.jar:./lib/kaon2.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.cli.QuickStart \ No newline at end of file +java -cp 
.:./lib/dig1.1-xmlbeans.jar:./lib/fact/FaCT++OWLAPI-v1.1.10+.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/arq.jar:./lib/jena/commons-logging-1.1.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/json.jar:./lib/jena/xercesImpl.jar:./lib/junit-4.4.jar:./lib/log4j.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/pellet/aterm-java-1.6.jar:./lib/pellet/pellet.jar:./lib/pellet/relaxngDatatype.jar:./lib/pellet/xsdlib.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.cli.QuickStart \ No newline at end of file Modified: trunk/bin/quickstart.bat =================================================================== --- trunk/bin/quickstart.bat 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/bin/quickstart.bat 2008-02-06 17:04:01 UTC (rev 504) @@ -1 +1 @@ -java -cp 
.;.\lib\dig1.1-xmlbeans.jar;.\lib\jena\antlr-2.7.5.jar;.\lib\jena\commons-logging.jar;.\lib\jena\concurrent.jar;.\lib\jena\icu4j_3_4.jar;.\lib\jena\iri.jar;.\lib\jena\jena.jar;.\lib\jena\xercesImpl.jar;.\lib\kaon2.jar;.\lib\owlapi\antlr-runtime-3.0.jar;.\lib\owlapi\commons-lang-2.2.jar;.\lib\owlapi\owlapi-api.jar;.\lib\owlapi\owlapi-apibinding.jar;.\lib\owlapi\owlapi-change.jar;.\lib\owlapi\owlapi-debugging.jar;.\lib\owlapi\owlapi-dig1_1.jar;.\lib\owlapi\owlapi-functionalparser.jar;.\lib\owlapi\owlapi-functionalrenderer.jar;.\lib\owlapi\owlapi-impl.jar;.\lib\owlapi\owlapi-krssparser.jar;.\lib\owlapi\owlapi-mansyntaxparser.jar;.\lib\owlapi\owlapi-mansyntaxrenderer.jar;.\lib\owlapi\owlapi-metrics.jar;.\lib\owlapi\owlapi-oboparser.jar;.\lib\owlapi\owlapi-owlxmlparser.jar;.\lib\owlapi\owlapi-owlxmlrenderer.jar;.\lib\owlapi\owlapi-rdfapi.jar;.\lib\owlapi\owlapi-rdfxmlparser.jar;.\lib\owlapi\owlapi-rdfxmlrenderer.jar;.\lib\owlapi\owlapi-util.jar;.\lib\xbean.jar;.\lib\dllearner.jar org.dllearner.cli.QuickStart \ No newline at end of file +java -cp 
.;.\lib\dig1.1-xmlbeans.jar;.\lib\fact\FaCT++OWLAPI-v1.1.10+.jar;.\lib\jena\antlr-2.7.5.jar;.\lib\jena\arq.jar;.\lib\jena\commons-logging-1.1.jar;.\lib\jena\concurrent.jar;.\lib\jena\icu4j_3_4.jar;.\lib\jena\iri.jar;.\lib\jena\jena.jar;.\lib\jena\json.jar;.\lib\jena\xercesImpl.jar;.\lib\junit-4.4.jar;.\lib\log4j.jar;.\lib\owlapi\antlr-runtime-3.0.jar;.\lib\owlapi\commons-lang-2.2.jar;.\lib\owlapi\owlapi-api.jar;.\lib\owlapi\owlapi-apibinding.jar;.\lib\owlapi\owlapi-change.jar;.\lib\owlapi\owlapi-debugging.jar;.\lib\owlapi\owlapi-dig1_1.jar;.\lib\owlapi\owlapi-functionalparser.jar;.\lib\owlapi\owlapi-functionalrenderer.jar;.\lib\owlapi\owlapi-impl.jar;.\lib\owlapi\owlapi-krssparser.jar;.\lib\owlapi\owlapi-mansyntaxparser.jar;.\lib\owlapi\owlapi-mansyntaxrenderer.jar;.\lib\owlapi\owlapi-metrics.jar;.\lib\owlapi\owlapi-oboparser.jar;.\lib\owlapi\owlapi-owlxmlparser.jar;.\lib\owlapi\owlapi-owlxmlrenderer.jar;.\lib\owlapi\owlapi-rdfapi.jar;.\lib\owlapi\owlapi-rdfxmlparser.jar;.\lib\owlapi\owlapi-rdfxmlrenderer.jar;.\lib\owlapi\owlapi-util.jar;.\lib\pellet\aterm-java-1.6.jar;.\lib\pellet\pellet.jar;.\lib\pellet\relaxngDatatype.jar;.\lib\pellet\xsdlib.jar;.\lib\xbean.jar;.\lib\dllearner.jar org.dllearner.cli.QuickStart \ No newline at end of file Modified: trunk/bin/ws =================================================================== --- trunk/bin/ws 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/bin/ws 2008-02-06 17:04:01 UTC (rev 504) @@ -1 +1 @@ -java -cp 
.:./lib/dig1.1-xmlbeans.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/commons-logging.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/xercesImpl.jar:./lib/kaon2.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.server.DLLearnerWSStart $@ \ No newline at end of file +java -cp 
.:./lib/dig1.1-xmlbeans.jar:./lib/fact/FaCT++OWLAPI-v1.1.10+.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/arq.jar:./lib/jena/commons-logging-1.1.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/json.jar:./lib/jena/xercesImpl.jar:./lib/junit-4.4.jar:./lib/log4j.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/pellet/aterm-java-1.6.jar:./lib/pellet/pellet.jar:./lib/pellet/relaxngDatatype.jar:./lib/pellet/xsdlib.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.server.DLLearnerWSStart $@ \ No newline at end of file Modified: trunk/bin/ws.bat =================================================================== --- trunk/bin/ws.bat 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/bin/ws.bat 2008-02-06 17:04:01 UTC (rev 504) @@ -1 +1 @@ -java -cp 
.;.\lib\dig1.1-xmlbeans.jar;.\lib\jena\antlr-2.7.5.jar;.\lib\jena\commons-logging.jar;.\lib\jena\concurrent.jar;.\lib\jena\icu4j_3_4.jar;.\lib\jena\iri.jar;.\lib\jena\jena.jar;.\lib\jena\xercesImpl.jar;.\lib\kaon2.jar;.\lib\owlapi\antlr-runtime-3.0.jar;.\lib\owlapi\commons-lang-2.2.jar;.\lib\owlapi\owlapi-api.jar;.\lib\owlapi\owlapi-apibinding.jar;.\lib\owlapi\owlapi-change.jar;.\lib\owlapi\owlapi-debugging.jar;.\lib\owlapi\owlapi-dig1_1.jar;.\lib\owlapi\owlapi-functionalparser.jar;.\lib\owlapi\owlapi-functionalrenderer.jar;.\lib\owlapi\owlapi-impl.jar;.\lib\owlapi\owlapi-krssparser.jar;.\lib\owlapi\owlapi-mansyntaxparser.jar;.\lib\owlapi\owlapi-mansyntaxrenderer.jar;.\lib\owlapi\owlapi-metrics.jar;.\lib\owlapi\owlapi-oboparser.jar;.\lib\owlapi\owlapi-owlxmlparser.jar;.\lib\owlapi\owlapi-owlxmlrenderer.jar;.\lib\owlapi\owlapi-rdfapi.jar;.\lib\owlapi\owlapi-rdfxmlparser.jar;.\lib\owlapi\owlapi-rdfxmlrenderer.jar;.\lib\owlapi\owlapi-util.jar;.\lib\xbean.jar;.\lib\dllearner.jar org.dllearner.server.DLLearnerWSStart %* \ No newline at end of file +java -cp 
.;.\lib\dig1.1-xmlbeans.jar;.\lib\fact\FaCT++OWLAPI-v1.1.10+.jar;.\lib\jena\antlr-2.7.5.jar;.\lib\jena\arq.jar;.\lib\jena\commons-logging-1.1.jar;.\lib\jena\concurrent.jar;.\lib\jena\icu4j_3_4.jar;.\lib\jena\iri.jar;.\lib\jena\jena.jar;.\lib\jena\json.jar;.\lib\jena\xercesImpl.jar;.\lib\junit-4.4.jar;.\lib\log4j.jar;.\lib\owlapi\antlr-runtime-3.0.jar;.\lib\owlapi\commons-lang-2.2.jar;.\lib\owlapi\owlapi-api.jar;.\lib\owlapi\owlapi-apibinding.jar;.\lib\owlapi\owlapi-change.jar;.\lib\owlapi\owlapi-debugging.jar;.\lib\owlapi\owlapi-dig1_1.jar;.\lib\owlapi\owlapi-functionalparser.jar;.\lib\owlapi\owlapi-functionalrenderer.jar;.\lib\owlapi\owlapi-impl.jar;.\lib\owlapi\owlapi-krssparser.jar;.\lib\owlapi\owlapi-mansyntaxparser.jar;.\lib\owlapi\owlapi-mansyntaxrenderer.jar;.\lib\owlapi\owlapi-metrics.jar;.\lib\owlapi\owlapi-oboparser.jar;.\lib\owlapi\owlapi-owlxmlparser.jar;.\lib\owlapi\owlapi-owlxmlrenderer.jar;.\lib\owlapi\owlapi-rdfapi.jar;.\lib\owlapi\owlapi-rdfxmlparser.jar;.\lib\owlapi\owlapi-rdfxmlrenderer.jar;.\lib\owlapi\owlapi-util.jar;.\lib\pellet\aterm-java-1.6.jar;.\lib\pellet\pellet.jar;.\lib\pellet\relaxngDatatype.jar;.\lib\pellet\xsdlib.jar;.\lib\xbean.jar;.\lib\dllearner.jar org.dllearner.server.DLLearnerWSStart %* \ No newline at end of file Modified: trunk/doc/eclipse/classpath.dist =================================================================== --- trunk/doc/eclipse/classpath.dist 2008-02-06 16:36:56 UTC (rev 503) +++ trunk/doc/eclipse/classpath.dist 2008-02-06 17:04:01 UTC (rev 504) @@ -3,15 +3,6 @@ <classpathentry kind="src" path="src/dl-learner"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> <classpathentry kind="lib" path="lib/dig1.1-xmlbeans.jar"/> - <classpathentry kind="lib" path="lib/kaon2.jar"/> - <classpathentry kind="lib" path="lib/xbean.jar"/> - <classpathentry kind="lib" path="lib/jena/antlr-2.7.5.jar"/> - <classpathentry kind="lib" path="lib/jena/commons-logging.jar"/> - <classpathentry kind="lib" 
path="lib/jena/concurrent.jar"/> - <classpathentry kind="lib" path="lib/jena/icu4j_3_4.jar"/> - <classpathentry kind="lib" path="lib/jena/iri.jar"/> - <classpathentry kind="lib" path="lib/jena/jena.jar"/> - <classpathentry kind="lib" path="lib/jena/xercesImpl.jar"/> <classpathentry kind="lib" path="lib/owlapi/antlr-runtime-3.0.jar"/> <classpathentry kind="lib" path="lib/owlapi/commons-lang-2.2.jar"/> <classpathentry kind="lib" path="lib/owlapi/owlapi-api.jar"/> @@ -33,5 +24,22 @@ <classpathentry kind="lib" path="lib/owlapi/owlapi-rdfxmlparser.jar"/> <classpathentry kind="lib" path="lib/owlapi/owlapi-rdfxmlrenderer.jar"/> <classpathentry kind="lib" path="lib/owlapi/owlapi-util.jar"/> + <classpathentry kind="lib" path="lib/jena/antlr-2.7.5.jar"/> + <classpathentry kind="lib" path="lib/jena/concurrent.jar"/> + <classpathentry kind="lib" path="lib/jena/icu4j_3_4.jar"/> + <classpathentry kind="lib" path="lib/jena/iri.jar"/> + <classpathentry kind="lib" path="lib/jena/jena.jar"/> + <classpathentry kind="lib" path="lib/jena/xercesImpl.jar"/> + <classpathentry kind="lib" path="lib/xbean.jar"/> + <classpathentry kind="lib" path="lib/fact/FaCT++OWLAPI-v1.1.10+.jar"/> + <classpathentry kind="lib" path="lib/pellet/aterm-java-1.6.jar"/> + <classpathentry kind="lib" path="lib/pellet/pellet.jar"/> + <classpathentry kind="lib" path="lib/pellet/relaxngDatatype.jar"/> + <classpathentry kind="lib" path="lib/pellet/xsdlib.jar"/> + <classpathentry kind="lib" path="lib/jena/arq.jar"/> + <classpathentry kind="lib" path="lib/jena/commons-logging-1.1.jar"/> + <classpathentry kind="lib" path="lib/junit-4.4.jar"/> + <classpathentry kind="lib" path="lib/log4j.jar"/> + <classpathentry kind="lib" path="lib/jena/json.jar"/> <classpathentry kind="output" path="classes"/> </classpath> Modified: trunk/src/dl-learner/org/dllearner/Info.java =================================================================== --- trunk/src/dl-learner/org/dllearner/Info.java 2008-02-06 16:36:56 UTC (rev 503) +++ 
trunk/src/dl-learner/org/dllearner/Info.java 2008-02-06 17:04:01 UTC (rev 504) @@ -3,6 +3,6 @@ package org.dllearner; public class Info { - public static final String build = "2007-08-29"; + public static final String build = "2008-02-06"; } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-07 18:52:48
|
Revision: 518 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=518&view=rev Author: jenslehmann Date: 2008-02-07 10:52:41 -0800 (Thu, 07 Feb 2008) Log Message: ----------- removed unused config folder Added Paths: ----------- trunk/examples/sparql/config.owl Removed Paths: ------------- trunk/config/ Copied: trunk/examples/sparql/config.owl (from rev 515, trunk/config/config.owl) =================================================================== --- trunk/examples/sparql/config.owl (rev 0) +++ trunk/examples/sparql/config.owl 2008-02-07 18:52:41 UTC (rev 518) @@ -0,0 +1,172 @@ +<?xml version="1.0"?> +<!DOCTYPE rdf:RDF [ + <!ENTITY config "http://www.extraction.org/config#"> + <!ENTITY owl "http://www.w3.org/2002/07/owl#"> + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#"> + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#"> + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#"> +]> +<rdf:RDF xml:base="http://www.extraction.org/config" xmlns:config="http://www.extraction.org/config#" xmlns:owl="http://www.w3.org/2002/07/owl#" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"> + <!-- Ontology Information --> + <owl:Ontology rdf:about=""> + <owl:versionInfo xml:lang="en">version 0.1</owl:versionInfo> + </owl:Ontology> + <!-- Classes --> + <owl:Class rdf:about="#Configuration"/> + <owl:Class rdf:about="#FilterSet"/> + <owl:Class rdf:about="#GETParameter"/> + <owl:Class rdf:about="#ObjectFilterSet"> + <rdfs:subClassOf rdf:resource="#FilterSet"/> + </owl:Class> + <owl:Class rdf:about="#PredicateFilterSet"> + <rdfs:subClassOf rdf:resource="#FilterSet"/> + </owl:Class> + <owl:Class rdf:about="#SparqlEndpoint"/> + <owl:Class rdf:about="#TypedQuery"/> + <!-- Annotation Properties --> + <owl:AnnotationProperty rdf:about="&rdfs;comment"/> + <owl:AnnotationProperty rdf:about="&owl;versionInfo"/> + <!-- Datatype Properties --> + <owl:DatatypeProperty rdf:about="#filtersURI"> + <rdfs:domain 
rdf:resource="#ObjectFilterSet"/> + <rdfs:domain rdf:resource="#PredicateFilterSet"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasAfterGET"> + <rdfs:domain rdf:resource="#SparqlEndpoint"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasHost"> + <rdfs:domain rdf:resource="#SparqlEndpoint"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasMode"> + <rdfs:domain rdf:resource="#TypedQuery"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasParameterContent"> + <rdfs:domain rdf:resource="#GETParameter"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasParameterName"> + <rdfs:domain rdf:resource="#GETParameter"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasPort"> + <rdfs:domain rdf:resource="#SparqlEndpoint"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasQueryParameter"> + <rdfs:domain rdf:resource="#SparqlEndpoint"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#hasRecursionDepth"> + <rdfs:domain rdf:resource="#Configuration"/> + </owl:DatatypeProperty> + <owl:DatatypeProperty rdf:about="#usesLiterals"> + <rdfs:domain rdf:resource="#TypedQuery"/> + </owl:DatatypeProperty> + <!-- Object Properties --> + <owl:ObjectProperty rdf:about="#hasGETParameter"> + <rdfs:domain rdf:resource="#SparqlEndpoint"/> + <rdfs:range rdf:resource="#GETParameter"/> + </owl:ObjectProperty> + <owl:ObjectProperty rdf:about="#hasObjectFilterSet"> + <rdfs:domain rdf:resource="#TypedQuery"/> + <rdfs:range rdf:resource="#ObjectFilterSet"/> + </owl:ObjectProperty> + <owl:ObjectProperty rdf:about="#hasPredicateFilterSet"> + <rdfs:domain rdf:resource="#TypedQuery"/> + <rdfs:range rdf:resource="#PredicateFilterSet"/> + </owl:ObjectProperty> + <owl:ObjectProperty rdf:about="#hasSparqlEndpoint"> + <rdfs:domain rdf:resource="#Configuration"/> + <rdfs:range rdf:resource="#SparqlEndpoint"/> + </owl:ObjectProperty> + <owl:ObjectProperty rdf:about="#hasTypedQuery"> + 
<rdfs:domain rdf:resource="#Configuration"/> + <rdfs:range rdf:resource="#TypedQuery"/> + </owl:ObjectProperty> + <!-- Instances --> + <config:SparqlEndpoint rdf:about="#dbpediaEndpoint"> + <config:hasAfterGET rdf:datatype="&xsd;string">/sparql</config:hasAfterGET> + <config:hasGETParameter rdf:resource="#defaultgraphuri"/> + <config:hasGETParameter rdf:resource="#format"/> + <config:hasHost rdf:datatype="&xsd;string">dbpedia.openlinksw.com</config:hasHost> + <config:hasPort rdf:datatype="&xsd;string">80</config:hasPort> + <config:hasQueryParameter rdf:datatype="&xsd;string">query</config:hasQueryParameter> + <config:hasURL>dbpedia.openlinksw.com:80/sparql</config:hasURL> + </config:SparqlEndpoint> + <config:ObjectFilterSet rdf:about="#dbpediaGeneralObjectFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Articles_</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Wikipedia_</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://upload.wikimedia.org/wikipedia/commons</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://upload.wikimedia.org/wikipedia</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://www.geonames.org</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://www.w3.org/2006/03/wn/wn20/instances/synset</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://www4.wiwiss.fu-berlin.de/flickrwrappr</config:filtersURI> + </config:ObjectFilterSet> + <config:PredicateFilterSet rdf:about="#dbpediaGeneralPredicateFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/property/reference</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/property/website</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/property/wikipage-</config:filtersURI> + </config:PredicateFilterSet> + 
<config:TypedQuery rdf:about="#dbpediaYago"> + <config:hasMode rdf:datatype="&xsd;string">forbid</config:hasMode> + <config:hasObjectFilterSet rdf:resource="#dbpediaGeneralObjectFilter"/> + <config:hasObjectFilterSet rdf:resource="#foafObjectFilter"/> + <config:hasObjectFilterSet rdf:resource="#yagoObjectFilter"/> + <config:hasPredicateFilterSet rdf:resource="#dbpediaGeneralPredicateFilter"/> + <config:hasPredicateFilterSet rdf:resource="#foafPredicateFilter"/> + <config:hasPredicateFilterSet rdf:resource="#sameAsFilter"/> + <config:hasPredicateFilterSet rdf:resource="#yagoPredicateFilter"/> + <config:usesLiterals rdf:datatype="&xsd;string">false</config:usesLiterals> + </config:TypedQuery> + <config:Configuration rdf:about="#dbpediatest" rdfs:comment="for first test"> + <config:hasRecursionDepth rdf:datatype="&xsd;string">2</config:hasRecursionDepth> + <config:hasSparqlEndpoint rdf:resource="#dbpediaEndpoint"/> + <config:hasTypedQuery rdf:resource="#dbpediaYago"/> + </config:Configuration> + <config:GETParameter rdf:about="#defaultgraphuri"> + <config:hasParameterContent rdf:datatype="&xsd;string">http://dbpedia.org</config:hasParameterContent> + <config:hasParameterName rdf:datatype="&xsd;string">default-graph-uri</config:hasParameterName> + </config:GETParameter> + <config:ObjectFilterSet rdf:about="#foafObjectFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://xmlns.com/foaf/0.1/</config:filtersURI> + </config:ObjectFilterSet> + <config:PredicateFilterSet rdf:about="#foafPredicateFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://xmlns.com/foaf/0.1/</config:filtersURI> + </config:PredicateFilterSet> + <config:GETParameter rdf:about="#format"> + <config:hasParameterContent rdf:datatype="&xsd;string">application/sparql-results.xml</config:hasParameterContent> + <config:hasParameterName rdf:datatype="&xsd;string">format</config:hasParameterName> + </config:GETParameter> + <config:Configuration rdf:about="#localjoseki"> + 
<config:hasRecursionDepth rdf:datatype="&xsd;string">2</config:hasRecursionDepth> + <config:hasSparqlEndpoint rdf:resource="#localjosekiendpoint"/> + <config:hasTypedQuery rdf:resource="#localjosekitypedquery"/> + </config:Configuration> + <config:SparqlEndpoint rdf:about="#localjosekiendpoint"> + <config:hasAfterGET rdf:datatype="&xsd;string">/books</config:hasAfterGET> + <config:hasHost rdf:datatype="http://www.w3.org/2001/XMLSchema#string">localhost</config:hasHost> + <config:hasPort rdf:datatype="&xsd;string">2020</config:hasPort> + <config:hasQueryParameter rdf:datatype="&xsd;string">query</config:hasQueryParameter> + <config:hasURL>localhost:2020/books</config:hasURL> + </config:SparqlEndpoint> + <config:TypedQuery rdf:about="#localjosekitypedquery"> + <config:hasMode rdf:datatype="&xsd;string">forbid</config:hasMode> + <config:hasObjectFilterSet rdf:resource="#dbpediaGeneralObjectFilter"/> + <config:hasPredicateFilterSet rdf:resource="#dbpediaGeneralPredicateFilter"/> + <config:usesLiterals rdf:datatype="&xsd;string">false</config:usesLiterals> + </config:TypedQuery> + <config:PredicateFilterSet rdf:about="#sameAsFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://www.w3.org/2002/07/owl#sameAs</config:filtersURI> + </config:PredicateFilterSet> + <config:ObjectFilterSet rdf:about="#yagoObjectFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Articles_</config:filtersURI> + <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Wikipedia_</config:filtersURI> + </config:ObjectFilterSet> + <config:PredicateFilterSet rdf:about="#yagoPredicateFilter"> + <config:filtersURI rdf:datatype="&xsd;string">http://www.w3.org/2004/02/skos/core</config:filtersURI> + </config:PredicateFilterSet> + <rdf:Description rdf:about="#hasURL"> + <rdf:type> + <rdf:Description rdf:about="http://www.w3.org/2002/07/owl#DatatypeProperty"/> + </rdf:type> + <rdfs:domain> + <rdf:Description 
rdf:about="#SparqlEndpoint"/> + </rdfs:domain> + </rdf:Description> +</rdf:RDF> This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-07 19:16:04
|
Revision: 522 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=522&view=rev Author: jenslehmann Date: 2008-02-07 11:15:54 -0800 (Thu, 07 Feb 2008) Log Message: ----------- - added lymphography data set (generously donated by Sebastian H.) - small bug fix in cross validation Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/reasoning/FastRetrieval.java trunk/src/dl-learner/org/dllearner/utilities/CrossValidation.java Added Paths: ----------- trunk/examples/lymphography/ trunk/examples/lymphography/lymphography.owl trunk/examples/lymphography/lymphography_Class1.conf trunk/examples/lymphography/lymphography_Class2.conf trunk/examples/lymphography/lymphography_Class3.conf trunk/examples/lymphography/lymphography_Class4.conf Added: trunk/examples/lymphography/lymphography.owl =================================================================== --- trunk/examples/lymphography/lymphography.owl (rev 0) +++ trunk/examples/lymphography/lymphography.owl 2008-02-07 19:15:54 UTC (rev 522) @@ -0,0 +1,2497 @@ @@ Diff output truncated at 100000 characters. @@ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-17 17:31:00
|
Revision: 592 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=592&view=rev Author: jenslehmann Date: 2008-02-17 09:30:58 -0800 (Sun, 17 Feb 2008) Log Message: ----------- fixed @.todo tag problem in build script Modified Paths: -------------- trunk/build.xml trunk/src/dl-learner/org/dllearner/utilities/ToDoTaglet.java Modified: trunk/build.xml =================================================================== --- trunk/build.xml 2008-02-17 15:55:11 UTC (rev 591) +++ trunk/build.xml 2008-02-17 17:30:58 UTC (rev 592) @@ -13,17 +13,11 @@ <property name="release_tmp_dir" value="release/${version_dir}" /> <property name="release_php_client_tmp_dir" value="release/php-client-${today}" /> - <!-- KAON2 must not be distributed with the official release --> - <property name="kaon2" value="${lib_dir}/kaon2.jar"/> - <!-- set up classpath --> <path id="classpath"> <pathelement location="."/> <fileset dir="${lib_dir}"> <include name="**/*.jar"/> - <include name="**/modules/*.jar"/> - <include name="**/jena/*.jar"/> - <include name="**/owlapi/*.jar"/> </fileset> </path> @@ -55,8 +49,7 @@ <!-- copy all other libraries --> <copy toDir="${release_tmp_dir}/lib"> - <!-- KAON2 has to be excluded, because it cannot be freely distributed --> - <fileset dir="${lib_dir}" excludes="${kaon2}" /> + <fileset dir="${lib_dir}" /> </copy> <!-- copy binary files and examples --> @@ -197,7 +190,7 @@ <pathconvert refid="classpath" property="jdocclasspath"/> <javadoc access="public" author="true" - classpath="${jdocclasspath}" + classpath="${jdocclasspath};${java.home}/../lib/tools.jar" destdir="doc/javadoc" doctitle="DL-Learner Javadoc" nodeprecated="false" Modified: trunk/src/dl-learner/org/dllearner/utilities/ToDoTaglet.java =================================================================== --- trunk/src/dl-learner/org/dllearner/utilities/ToDoTaglet.java 2008-02-17 15:55:11 UTC (rev 591) +++ trunk/src/dl-learner/org/dllearner/utilities/ToDoTaglet.java 2008-02-17 17:30:58 UTC (rev 592) @@ 
-6,16 +6,20 @@ import com.sun.tools.doclets.Taglet; /** - * Small taglet for showing todo-markers in Javadoc-runs. + * Small taglet for showing todo-markers in Javadoc-runs. You can insert + * the following in comments: * - * Parts of the code are taken from the JDK Javadoc. + * @.todo task + * + * (Note the dot, which avoids conflicts with a possible future @todo + * standard tag.) Parts of the code are taken from the JDK Javadoc. * * @author Jens Lehmann * */ public class ToDoTaglet implements Taglet { - private static final String NAME = "todo"; + private static final String NAME = ".todo"; private static final String HEADER = "To Do:"; /** @@ -26,9 +30,9 @@ } /** - * Will return true since <code>@todo</code> + * Will return true since <code>@.todo</code> * can be used in field documentation. - * @return true since <code>@todo</code> + * @return true since <code>@.todo</code> * can be used in field documentation and false * otherwise. */ This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-17 18:28:52
|
Revision: 595 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=595&view=rev Author: jenslehmann Date: 2008-02-17 10:28:38 -0800 (Sun, 17 Feb 2008) Log Message: ----------- fixed bug in build script Modified Paths: -------------- trunk/bin/dllearner trunk/build.xml trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java Modified: trunk/bin/dllearner =================================================================== --- trunk/bin/dllearner 2008-02-17 18:14:40 UTC (rev 594) +++ trunk/bin/dllearner 2008-02-17 18:28:38 UTC (rev 595) @@ -1 +1 @@ -java -cp .:./lib/dig1.1-xmlbeans.jar:./lib/fact/FaCT++OWLAPI-v1.1.10+.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/arq.jar:./lib/jena/commons-logging-1.1.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/json.jar:./lib/jena/xercesImpl.jar:./lib/junit-4.4.jar:./lib/log4j.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/pellet/aterm-java-1.6.jar:./lib/pellet/pellet.jar:./lib/pellet/relaxngDatatype.jar:./lib/pellet/xsdlib.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.cli.start $@ \ No newline at end of file +java -cp 
.:./lib/dig1.1-xmlbeans.jar:./lib/fact/FaCT++OWLAPI-v1.1.10+.jar:./lib/jena/antlr-2.7.5.jar:./lib/jena/arq.jar:./lib/jena/commons-logging-1.1.jar:./lib/jena/concurrent.jar:./lib/jena/icu4j_3_4.jar:./lib/jena/iri.jar:./lib/jena/jena.jar:./lib/jena/json.jar:./lib/jena/xercesImpl.jar:./lib/junit-4.4.jar:./lib/log4j.jar:./lib/owlapi/antlr-runtime-3.0.jar:./lib/owlapi/commons-lang-2.2.jar:./lib/owlapi/owlapi-api.jar:./lib/owlapi/owlapi-apibinding.jar:./lib/owlapi/owlapi-change.jar:./lib/owlapi/owlapi-debugging.jar:./lib/owlapi/owlapi-dig1_1.jar:./lib/owlapi/owlapi-functionalparser.jar:./lib/owlapi/owlapi-functionalrenderer.jar:./lib/owlapi/owlapi-impl.jar:./lib/owlapi/owlapi-krssparser.jar:./lib/owlapi/owlapi-mansyntaxparser.jar:./lib/owlapi/owlapi-mansyntaxrenderer.jar:./lib/owlapi/owlapi-metrics.jar:./lib/owlapi/owlapi-oboparser.jar:./lib/owlapi/owlapi-owlxmlparser.jar:./lib/owlapi/owlapi-owlxmlrenderer.jar:./lib/owlapi/owlapi-rdfapi.jar:./lib/owlapi/owlapi-rdfxmlparser.jar:./lib/owlapi/owlapi-rdfxmlrenderer.jar:./lib/owlapi/owlapi-util.jar:./lib/pellet/aterm-java-1.6.jar:./lib/pellet/pellet.jar:./lib/pellet/relaxngDatatype.jar:./lib/pellet/xsdlib.jar:./lib/xbean.jar:./lib/dllearner.jar org.dllearner.cli.Start $@ \ No newline at end of file Modified: trunk/build.xml =================================================================== --- trunk/build.xml 2008-02-17 18:14:40 UTC (rev 594) +++ trunk/build.xml 2008-02-17 18:28:38 UTC (rev 595) @@ -181,7 +181,7 @@ <echo file="bin/dllearner.bat" message="java -cp ${pathStringWin} org.dllearner.cli.Start %*"/> <echo file="bin/ws.bat" message="java -cp ${pathStringWin} org.dllearner.server.DLLearnerWSStart %*"/> <echo file="bin/quickstart" message="java -cp ${pathStringUnix} org.dllearner.cli.QuickStart"/> - <echo file="bin/dllearner" message="java -cp ${pathStringUnix} org.dllearner.cli.start $@"/> + <echo file="bin/dllearner" message="java -cp ${pathStringUnix} org.dllearner.cli.Start $@"/> <echo file="bin/ws" message="java 
-cp ${pathStringUnix} org.dllearner.server.DLLearnerWSStart $@"/> </target> Modified: trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java =================================================================== --- trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java 2008-02-17 18:14:40 UTC (rev 594) +++ trunk/src/dl-learner/org/dllearner/server/DLLearnerWSStart.java 2008-02-17 18:28:38 UTC (rev 595) @@ -66,7 +66,7 @@ server.setExecutor(threads); server.start(); - System.out.println("Starting DL-Learner web service at http://" + isa.getHostName() + ":" + System.out.print("Starting DL-Learner web service at http://" + isa.getHostName() + ":" + isa.getPort() + "/services ... "); Endpoint endpoint = Endpoint.create(new DLLearnerWS()); // Endpoint endpoint = Endpoint.create(new CustomDataClass()); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2008-02-17 23:38:50
|
Revision: 596 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=596&view=rev Author: kurzum Date: 2008-02-17 15:38:43 -0800 (Sun, 17 Feb 2008) Log Message: ----------- fixed bug #1878069 Modified Paths: -------------- trunk/examples/commandcollection.txt trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java trunk/src/dl-learner/org/dllearner/test/JenaLongQueryTest.java Modified: trunk/examples/commandcollection.txt =================================================================== --- trunk/examples/commandcollection.txt 2008-02-17 18:28:38 UTC (rev 595) +++ trunk/examples/commandcollection.txt 2008-02-17 23:38:43 UTC (rev 596) @@ -19,6 +19,7 @@ // algorithm settings algorithm = refinement; +algorithm = refexamples refinement.horizontalExpansionFactor = 0.6; refinement.quiet = false; refinement.useTooWeakList = true; Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-02-17 18:28:38 UTC (rev 595) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-02-17 23:38:43 UTC (rev 596) @@ -32,6 +32,7 @@ import com.hp.hpl.jena.query.ResultSetFactory; import com.hp.hpl.jena.query.ResultSetFormatter; import com.hp.hpl.jena.sparql.core.ResultBinding; +import com.hp.hpl.jena.sparql.engine.http.HttpQuery; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; /** @@ -82,6 +83,7 @@ } logger.info("query SPARQL server"); try{ + HttpQuery.urlLimit = 3*1024 ; rs = queryExecution.execSelect(); logger.info(rs.getResultVars().toString()); } catch (Exception e){ Modified: trunk/src/dl-learner/org/dllearner/test/JenaLongQueryTest.java =================================================================== --- trunk/src/dl-learner/org/dllearner/test/JenaLongQueryTest.java 2008-02-17 18:28:38 UTC (rev 595) +++ trunk/src/dl-learner/org/dllearner/test/JenaLongQueryTest.java 2008-02-17 23:38:43 UTC (rev 596) 
@@ -2,6 +2,7 @@ import com.hp.hpl.jena.query.ResultSet; import com.hp.hpl.jena.query.ResultSetFormatter; +import com.hp.hpl.jena.sparql.engine.http.HttpQuery; import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP; @@ -63,6 +64,7 @@ ResultSet rs; String xml; QueryEngineHTTP queryExecution; + HttpQuery.urlLimit = 3*1024 ; queryExecution = new QueryEngineHTTP(url, shortQuery); queryExecution.addDefaultGraph(defaultgraph); rs = queryExecution.execSelect(); @@ -72,14 +74,17 @@ try{ queryExecution=new QueryEngineHTTP(url,longQuery); queryExecution.addDefaultGraph(defaultgraph); + + //queryExecution.getContext(). rs = queryExecution.execSelect(); xml = ResultSetFormatter.asXMLString(rs); System.out.println("Long Query ResultSet length: "+xml.length()+"\n"); System.out.println("Long query XML: "+xml); }catch (Exception e) {e.printStackTrace();} + // String queryWithIncreasingLength=""; - for (int i = 0; i < 30; i++) { + for (int i = 0; i < 100; i++) { queryWithIncreasingLength = makeQueryString ( i); queryExecution=new QueryEngineHTTP(url,queryWithIncreasingLength); queryExecution.addDefaultGraph(defaultgraph); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2008-02-18 14:27:09
|
Revision: 600 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=600&view=rev Author: kurzum Date: 2008-02-18 06:26:58 -0800 (Mon, 18 Feb 2008) Log Message: ----------- fixed examples (header lymphography, some readmes) Modified Paths: -------------- trunk/examples/lymphography/lymphography_Class1.conf trunk/examples/lymphography/lymphography_Class2.conf trunk/examples/lymphography/lymphography_Class3.conf trunk/examples/lymphography/lymphography_Class4.conf trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java Added Paths: ----------- trunk/examples/sparql/README.txt trunk/examples/sparql/roles_notworking/README.txt Removed Paths: ------------- trunk/examples/sparql/config.owl Modified: trunk/examples/lymphography/lymphography_Class1.conf =================================================================== --- trunk/examples/lymphography/lymphography_Class1.conf 2008-02-18 12:35:49 UTC (rev 599) +++ trunk/examples/lymphography/lymphography_Class1.conf 2008-02-18 14:26:58 UTC (rev 600) @@ -1,4 +1,31 @@ +/* +Converted from the UCI Machine Learning Repository at: +http://archive.ics.uci.edu/ml/datasets/Lymphography +Files: +lymphography_Class1.conf +lymphography_Class2.conf +lymphography_Class3.conf +lymphography_Class4.conf + +A solution is found relatively fast for example 1 and 4. +Because of the increasing length of concepts for example 2 and 3, +it will need a lot of time to find a solution, in case it exists at all.
+Mainly used as a test case for improving the algorithms (example 4 uses +a new experimental version of the refinement algorithm) + + +*/ + + +reasoner = dig; +//reasoner = fastRetrieval; + +algorithm = refinement; +//refinement.heuristic = flexible; +//algorithm = refexamples; + + refinement.ignoredConcepts = { "http://www.example.org/lymphography#Target1_NormalFind", "http://www.example.org/lymphography#Target2_Metastases", Modified: trunk/examples/lymphography/lymphography_Class2.conf =================================================================== --- trunk/examples/lymphography/lymphography_Class2.conf 2008-02-18 12:35:49 UTC (rev 599) +++ trunk/examples/lymphography/lymphography_Class2.conf 2008-02-18 14:26:58 UTC (rev 600) @@ -1,3 +1,28 @@ +/* +Converted from the UCI Machine Learning Repository at: +http://archive.ics.uci.edu/ml/datasets/Lymphography + +Files: +lymphography_Class1.conf +lymphography_Class2.conf +lymphography_Class3.conf +lymphography_Class4.conf + +A solution is found relatively fast for example 1 and 4. +Because of the increasing length of concepts for example 2 and 3, +it will need a lot of time to find a solution, in case it exists at all.
+Mainly used as a test case for improving the algorithms (example 4 uses +a new experimental version of the refinement algorithm) + + +*/ +reasoner = dig; +//reasoner = fastRetrieval; + +algorithm = refinement; +//refinement.heuristic = flexible; +//algorithm = refexamples; + refinement.ignoredConcepts = { "http://www.example.org/lymphography#Target1_NormalFind", "http://www.example.org/lymphography#Target2_Metastases", @@ -6,6 +31,8 @@ }; + + refinement.useAllConstructor = false; refinement.useExistsConstructor = true; refinement.useNegation = false; Modified: trunk/examples/lymphography/lymphography_Class3.conf =================================================================== --- trunk/examples/lymphography/lymphography_Class3.conf 2008-02-18 12:35:49 UTC (rev 599) +++ trunk/examples/lymphography/lymphography_Class3.conf 2008-02-18 14:26:58 UTC (rev 600) @@ -1,3 +1,31 @@ +/* +Converted from the UCI Machine Learning Repository at: +http://archive.ics.uci.edu/ml/datasets/Lymphography + +Files: +lymphography_Class1.conf +lymphography_Class2.conf +lymphography_Class3.conf +lymphography_Class4.conf + +A solution is found relatively fast for example 1 and 4. +Because of the increasing length of concepts for example 2 and 3, +it will need a lot of time to find a solution, in case it exists at all.
+Mainly used as a test case for improving the algorithms (example 4 uses +a new experimental version of the refinement algorithm) + + +*/ + + + +reasoner = dig; +//reasoner = fastRetrieval; + +algorithm = refinement; +//refinement.heuristic = flexible; +//algorithm = refexamples; + refinement.ignoredConcepts = { "http://www.example.org/lymphography#Target1_NormalFind", "http://www.example.org/lymphography#Target2_Metastases", Modified: trunk/examples/lymphography/lymphography_Class4.conf =================================================================== --- trunk/examples/lymphography/lymphography_Class4.conf 2008-02-18 12:35:49 UTC (rev 599) +++ trunk/examples/lymphography/lymphography_Class4.conf 2008-02-18 14:26:58 UTC (rev 600) @@ -1,6 +1,28 @@ +/* +Converted form the UCI Machine Learning Repository at: +http://archive.ics.uci.edu/ml/datasets/Lymphography +Files: +lymphography_Class1.conf +lymphography_Class2.conf +lymphography_Class3.conf +lymphography_Class4.conf + +A solution ist found relatively fast for example 1 and 4. +Because of the increasing length of concepts for example 2 and 4, +it will need a lot of time to find a solution, in case it exists at all. +Mainly used as a test case for improving the algorithms (example 4 uses +a new experimental version of the refinement algorithm) + + +*/ + + +//reasoner = dig; reasoner = fastRetrieval; +//algorithm = refinement; +//refinement.heuristic = flexible; algorithm = refexamples; refexamples.ignoredConcepts = { Added: trunk/examples/sparql/README.txt =================================================================== --- trunk/examples/sparql/README.txt (rev 0) +++ trunk/examples/sparql/README.txt 2008-02-18 14:26:58 UTC (rev 600) @@ -0,0 +1,7 @@ + +Note: DBpedia is always subject to change, solutions will change over time + +After using the SPARQL Component, a n-triple file is created at: +examples/../cache/ or root/cache +Which contains the used knowledge base. 
+ Deleted: trunk/examples/sparql/config.owl =================================================================== --- trunk/examples/sparql/config.owl 2008-02-18 12:35:49 UTC (rev 599) +++ trunk/examples/sparql/config.owl 2008-02-18 14:26:58 UTC (rev 600) @@ -1,172 +0,0 @@ -<?xml version="1.0"?> -<!DOCTYPE rdf:RDF [ - <!ENTITY config "http://www.extraction.org/config#"> - <!ENTITY owl "http://www.w3.org/2002/07/owl#"> - <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#"> - <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#"> - <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#"> -]> -<rdf:RDF xml:base="http://www.extraction.org/config" xmlns:config="http://www.extraction.org/config#" xmlns:owl="http://www.w3.org/2002/07/owl#" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#"> - <!-- Ontology Information --> - <owl:Ontology rdf:about=""> - <owl:versionInfo xml:lang="en">version 0.1</owl:versionInfo> - </owl:Ontology> - <!-- Classes --> - <owl:Class rdf:about="#Configuration"/> - <owl:Class rdf:about="#FilterSet"/> - <owl:Class rdf:about="#GETParameter"/> - <owl:Class rdf:about="#ObjectFilterSet"> - <rdfs:subClassOf rdf:resource="#FilterSet"/> - </owl:Class> - <owl:Class rdf:about="#PredicateFilterSet"> - <rdfs:subClassOf rdf:resource="#FilterSet"/> - </owl:Class> - <owl:Class rdf:about="#SparqlEndpoint"/> - <owl:Class rdf:about="#TypedQuery"/> - <!-- Annotation Properties --> - <owl:AnnotationProperty rdf:about="&rdfs;comment"/> - <owl:AnnotationProperty rdf:about="&owl;versionInfo"/> - <!-- Datatype Properties --> - <owl:DatatypeProperty rdf:about="#filtersURI"> - <rdfs:domain rdf:resource="#ObjectFilterSet"/> - <rdfs:domain rdf:resource="#PredicateFilterSet"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasAfterGET"> - <rdfs:domain rdf:resource="#SparqlEndpoint"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasHost"> - <rdfs:domain rdf:resource="#SparqlEndpoint"/> - 
</owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasMode"> - <rdfs:domain rdf:resource="#TypedQuery"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasParameterContent"> - <rdfs:domain rdf:resource="#GETParameter"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasParameterName"> - <rdfs:domain rdf:resource="#GETParameter"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasPort"> - <rdfs:domain rdf:resource="#SparqlEndpoint"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasQueryParameter"> - <rdfs:domain rdf:resource="#SparqlEndpoint"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#hasRecursionDepth"> - <rdfs:domain rdf:resource="#Configuration"/> - </owl:DatatypeProperty> - <owl:DatatypeProperty rdf:about="#usesLiterals"> - <rdfs:domain rdf:resource="#TypedQuery"/> - </owl:DatatypeProperty> - <!-- Object Properties --> - <owl:ObjectProperty rdf:about="#hasGETParameter"> - <rdfs:domain rdf:resource="#SparqlEndpoint"/> - <rdfs:range rdf:resource="#GETParameter"/> - </owl:ObjectProperty> - <owl:ObjectProperty rdf:about="#hasObjectFilterSet"> - <rdfs:domain rdf:resource="#TypedQuery"/> - <rdfs:range rdf:resource="#ObjectFilterSet"/> - </owl:ObjectProperty> - <owl:ObjectProperty rdf:about="#hasPredicateFilterSet"> - <rdfs:domain rdf:resource="#TypedQuery"/> - <rdfs:range rdf:resource="#PredicateFilterSet"/> - </owl:ObjectProperty> - <owl:ObjectProperty rdf:about="#hasSparqlEndpoint"> - <rdfs:domain rdf:resource="#Configuration"/> - <rdfs:range rdf:resource="#SparqlEndpoint"/> - </owl:ObjectProperty> - <owl:ObjectProperty rdf:about="#hasTypedQuery"> - <rdfs:domain rdf:resource="#Configuration"/> - <rdfs:range rdf:resource="#TypedQuery"/> - </owl:ObjectProperty> - <!-- Instances --> - <config:SparqlEndpoint rdf:about="#dbpediaEndpoint"> - <config:hasAfterGET rdf:datatype="&xsd;string">/sparql</config:hasAfterGET> - <config:hasGETParameter rdf:resource="#defaultgraphuri"/> - 
<config:hasGETParameter rdf:resource="#format"/> - <config:hasHost rdf:datatype="&xsd;string">dbpedia.openlinksw.com</config:hasHost> - <config:hasPort rdf:datatype="&xsd;string">80</config:hasPort> - <config:hasQueryParameter rdf:datatype="&xsd;string">query</config:hasQueryParameter> - <config:hasURL>dbpedia.openlinksw.com:80/sparql</config:hasURL> - </config:SparqlEndpoint> - <config:ObjectFilterSet rdf:about="#dbpediaGeneralObjectFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Articles_</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Wikipedia_</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://upload.wikimedia.org/wikipedia/commons</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://upload.wikimedia.org/wikipedia</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://www.geonames.org</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://www.w3.org/2006/03/wn/wn20/instances/synset</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://www4.wiwiss.fu-berlin.de/flickrwrappr</config:filtersURI> - </config:ObjectFilterSet> - <config:PredicateFilterSet rdf:about="#dbpediaGeneralPredicateFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/property/reference</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/property/website</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/property/wikipage-</config:filtersURI> - </config:PredicateFilterSet> - <config:TypedQuery rdf:about="#dbpediaYago"> - <config:hasMode rdf:datatype="&xsd;string">forbid</config:hasMode> - <config:hasObjectFilterSet rdf:resource="#dbpediaGeneralObjectFilter"/> - <config:hasObjectFilterSet rdf:resource="#foafObjectFilter"/> - <config:hasObjectFilterSet rdf:resource="#yagoObjectFilter"/> - 
<config:hasPredicateFilterSet rdf:resource="#dbpediaGeneralPredicateFilter"/> - <config:hasPredicateFilterSet rdf:resource="#foafPredicateFilter"/> - <config:hasPredicateFilterSet rdf:resource="#sameAsFilter"/> - <config:hasPredicateFilterSet rdf:resource="#yagoPredicateFilter"/> - <config:usesLiterals rdf:datatype="&xsd;string">false</config:usesLiterals> - </config:TypedQuery> - <config:Configuration rdf:about="#dbpediatest" rdfs:comment="for first test"> - <config:hasRecursionDepth rdf:datatype="&xsd;string">2</config:hasRecursionDepth> - <config:hasSparqlEndpoint rdf:resource="#dbpediaEndpoint"/> - <config:hasTypedQuery rdf:resource="#dbpediaYago"/> - </config:Configuration> - <config:GETParameter rdf:about="#defaultgraphuri"> - <config:hasParameterContent rdf:datatype="&xsd;string">http://dbpedia.org</config:hasParameterContent> - <config:hasParameterName rdf:datatype="&xsd;string">default-graph-uri</config:hasParameterName> - </config:GETParameter> - <config:ObjectFilterSet rdf:about="#foafObjectFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://xmlns.com/foaf/0.1/</config:filtersURI> - </config:ObjectFilterSet> - <config:PredicateFilterSet rdf:about="#foafPredicateFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://xmlns.com/foaf/0.1/</config:filtersURI> - </config:PredicateFilterSet> - <config:GETParameter rdf:about="#format"> - <config:hasParameterContent rdf:datatype="&xsd;string">application/sparql-results.xml</config:hasParameterContent> - <config:hasParameterName rdf:datatype="&xsd;string">format</config:hasParameterName> - </config:GETParameter> - <config:Configuration rdf:about="#localjoseki"> - <config:hasRecursionDepth rdf:datatype="&xsd;string">2</config:hasRecursionDepth> - <config:hasSparqlEndpoint rdf:resource="#localjosekiendpoint"/> - <config:hasTypedQuery rdf:resource="#localjosekitypedquery"/> - </config:Configuration> - <config:SparqlEndpoint rdf:about="#localjosekiendpoint"> - <config:hasAfterGET 
rdf:datatype="&xsd;string">/books</config:hasAfterGET> - <config:hasHost rdf:datatype="http://www.w3.org/2001/XMLSchema#string">localhost</config:hasHost> - <config:hasPort rdf:datatype="&xsd;string">2020</config:hasPort> - <config:hasQueryParameter rdf:datatype="&xsd;string">query</config:hasQueryParameter> - <config:hasURL>localhost:2020/books</config:hasURL> - </config:SparqlEndpoint> - <config:TypedQuery rdf:about="#localjosekitypedquery"> - <config:hasMode rdf:datatype="&xsd;string">forbid</config:hasMode> - <config:hasObjectFilterSet rdf:resource="#dbpediaGeneralObjectFilter"/> - <config:hasPredicateFilterSet rdf:resource="#dbpediaGeneralPredicateFilter"/> - <config:usesLiterals rdf:datatype="&xsd;string">false</config:usesLiterals> - </config:TypedQuery> - <config:PredicateFilterSet rdf:about="#sameAsFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://www.w3.org/2002/07/owl#sameAs</config:filtersURI> - </config:PredicateFilterSet> - <config:ObjectFilterSet rdf:about="#yagoObjectFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Articles_</config:filtersURI> - <config:filtersURI rdf:datatype="&xsd;string">http://dbpedia.org/resource/Category:Wikipedia_</config:filtersURI> - </config:ObjectFilterSet> - <config:PredicateFilterSet rdf:about="#yagoPredicateFilter"> - <config:filtersURI rdf:datatype="&xsd;string">http://www.w3.org/2004/02/skos/core</config:filtersURI> - </config:PredicateFilterSet> - <rdf:Description rdf:about="#hasURL"> - <rdf:type> - <rdf:Description rdf:about="http://www.w3.org/2002/07/owl#DatatypeProperty"/> - </rdf:type> - <rdfs:domain> - <rdf:Description rdf:about="#SparqlEndpoint"/> - </rdfs:domain> - </rdf:Description> -</rdf:RDF> Added: trunk/examples/sparql/roles_notworking/README.txt =================================================================== --- trunk/examples/sparql/roles_notworking/README.txt (rev 0) +++ trunk/examples/sparql/roles_notworking/README.txt 2008-02-18 
14:26:58 UTC (rev 600) @@ -0,0 +1,4 @@ + +The learning of domain/range for roles has been deactivated for this release, +because it involved several manual steps, which were not supported by a gui +or a dialog system. We try to automate this process for the next release. \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java =================================================================== --- trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-02-18 12:35:49 UTC (rev 599) +++ trunk/src/dl-learner/org/dllearner/kb/sparql/SparqlQuery.java 2008-02-18 14:26:58 UTC (rev 600) @@ -83,6 +83,7 @@ } logger.info("query SPARQL server"); try{ + //TODO after overnext Jena release HttpQuery.urlLimit = 3*1024 ; rs = queryExecution.execSelect(); logger.info(rs.getResultVars().toString()); This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-18 15:54:05
|
Revision: 604 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=604&view=rev Author: jenslehmann Date: 2008-02-18 07:54:00 -0800 (Mon, 18 Feb 2008) Log Message: ----------- - updated doc folder - small fix in build script Modified Paths: -------------- trunk/build.xml trunk/src/dl-learner/org/dllearner/Info.java Added Paths: ----------- trunk/doc/kbFileSyntax.txt Removed Paths: ------------- trunk/doc/configOptionsOld.txt trunk/doc/sparqlModule.txt Modified: trunk/build.xml =================================================================== --- trunk/build.xml 2008-02-18 15:35:03 UTC (rev 603) +++ trunk/build.xml 2008-02-18 15:54:00 UTC (rev 604) @@ -92,7 +92,7 @@ <!-- copy source code --> <mkdir dir="${release_tmp_dir}/src/"/> <copy toDir="${release_tmp_dir}/src/"> - <fileset dir="${source_dir}" includes="**/*.java,**/*.jjt,build.xml"/> + <fileset dir="${source_dir}" includes="**/*.java,**/*.html,**/*.gif,**/*.jjt,build.xml"/> </copy> <!-- create copy developer documentation --> Deleted: trunk/doc/configOptionsOld.txt =================================================================== --- trunk/doc/configOptionsOld.txt 2008-02-18 15:35:03 UTC (rev 603) +++ trunk/doc/configOptionsOld.txt 2008-02-18 15:54:00 UTC (rev 604) @@ -1,294 +0,0 @@ -Configuration Files -=================== - -This file gives an overview for running DL-Learner using configuration files -as provided in the examples directory. - -The background knowledge can either be given as OWL DL file (using the import -function in the configuration files) or by specifying it directly in the -configuration file (which we refer to as the internal knowledge base). - -Some examples of the syntax of the background knowledge in the internal -knowledge base: - -person = (male OR female). -mother = (female AND EXISTS hasChild.TOP). -motherManyDaughters = (female AND >= 4 hasChild.female). -(mother AND father) SUBCLASSOF person. - -Also see the example files. 
- -This is the EBNF description of the input language [slightly outdated]: - -Number = ["1"-"9"] (["0"-"9"])* -Id = ["a"-"z"] (["_","a"-"z","A"-"Z","0"-"9"])* -String: "\"" (~["\"","\\","\n","\r"])* "\"" -Instruction = ConfOption - | FunctionCall - | PosExample - | NegExample - | ABoxConcept - | ABoxRole - | Transitive - | Functional - | Symmetric - | Inverse - | Subrole - | TBoxEquiv - | TBoxSub -ConfOption = Id [ "." Id ] "=" ( Id | Number ) ";" -FunctionCall = Id "(" String ")" ";" -PosExample = "+" Id "(" Id ")" "." -NegExample = "-" Id "(" Id ")" "." -ABoxConcept = Concept "(" Id ")" "." -ABoxRole = Id "(" Id "," Id ")" "." -Transitive = "Transitive" "(" Id ")" "." -Functional = "Functional" "(" Id ")" "." -Symmetric = "Symmetric" "(" Id ")" "." -Inverse = "Inverse" "(" Id "," Id ")" "." -Subrole = "Subrole" "(" Id "," Id ")" "." -TBoxEquiv = Concept "=" Concept "." -TBoxSub = Concept ("SUBCLASSOF" | "SUB" ) Concept "." -Concept = "TOP" - | "BOTTOM" - | Id - | "(" Concept "AND" Concept ")" - | "(" Concept "OR" Concept ")" - | "EXISTS" Id "." Concept - | "ALL" Id "." Concept - | "NOT" Concept - | ">=" Number Id "." Concept - | "<=" Number Id "." Concept - -Configuration Options -===================== - -General -------- - -Option: algorithm -Possible Values: bruteForce, gp, random, refinement, hybridGP -Default: refinement -Effect: Specifies the algorithm to use for solving the learning problem. Note, - that hybridGP is not an algorithm itself, but starts the GP algorithm - with a sensible set of default values for the hybrid algorithm combining - GP with refinement operators. In particular the probability of all - operators except refinement is set to 0. - -Option: reasoner -Possible Values: dig, kaon2, fastRetrieval -Default: dig -Effect: Specifies the reasoner to be used. DIG communicates with a reasoner - using the DIG Interface. KAON2 means to use the KAON2 Java API directly. 
- FastRetrieval is an internal algorithm, which can only be used for - retrieval (not for subsumption). Currently the DIG reasoner cannot read - OWL files. - -Option: digReasonerURL -Possible Values: a valid URL -Default: http://localhost:8081 -Effect: Specifies the URL to be used to look for a DIG capable reasoner. - -Option: writeDIGProtocol -Possible Values: true, false -Default: false -Effect: Specifies whether to store all DIG communication. - -Option: digProtocolFile -Possible Values: strings -Default: digProtocol.txt -Effect: The file to store all DIG communication if writeDIGProtocol is true. - -Option: useRetrievalForClassification -Possible Values: true, false -Default: false -Effect: To measure which concepts are covered, one can either use one retrieval - or several instance checks (at most one for each example). This option - controls which of both options should be used. - -Option: percentPerLengthUnit -Possible Values: 0-1 -Default: 0.05 -Effect: How much percent (wrt classification accuracy) can a concept be worse to - justify an increase in length of 1. This variable is used for GP and in - refinement when the flexible heuristic is used. For GP, you should use a - value smaller than the default. - -> general options below are ignored < -> by the refinement operator algorithm < - -Option: accuracyPenalty -Possible Values: 1-1000 -Default: 1 -Effect: Sets the penalty for "small misclassifications". - -Option: errorPenalty -Possible Values: 1-1000 -Default: 3 -Effect: Sets the penalty for classification errors. - -Option: maxLength -Possible Values: 1-20 -Default: 7 -Effect: For the brute force learner this specifies the depth limit for the - search. The GP learner currently ignores it. - -Option: scoreMethod -Possible Values: full, positive -Default: positive -Effect: The positive score method ignores if a negative examples cannot be - classified. This is often usefull, because of the limited expressiveness - of SHIQ wrt. negated role assertions. 
The full method penalizes this. - -Option: showCorrectClassifications -Possible Values: true, false -Default: false -Effect: Controls if correct classifications are printed (does not effect the - algorithm). - -Option: penalizeNeutralExamples -Possible Values: true, false -Default: false -Effect: If true there is a penalty if a neutral (neither positive nor negative) - individual is classified as either positive or negative. This should - usually be set to false. - -Refinement Operator Algorithm Specific --------------------------------------- - -Option: refinement.horizontalExpansionFactor -Possible Values: 0-1 -Default: 0.6 -Effect: Specifies horizontal expansion factor. - -Option: refinement.writeSearchTree -Possible Values: true, false -Default: false -Effect: Specifies whether to write the search tree to a file. - -Option: refinement.searchTreeFile -Possible Values: strings -Default: "searchTree.txt" -Effect: Specifies a file to save the current search tree after each loop of - the refinement algorithm. - -Option: refinement.heuristic -Possible Values: flexible, lexicographic -Default: lexicographic -Effect: The refinement operator together with a heuristic yields a learning - algorithm. The lexicographic heuristic uses a lexicographic order of - covered negative examples and horizontal expansion of a node (i.e. - the covered examples are the first criterion, the horizontal expansion - the second criterion). The flexible heuristic computes a combined node - score of both criteria. Note, that the lexicographic needs a horizontal - expansion factor greater than 0 to ensure correctness of the learning - algorithm. - -Option: refinement.quiet -Possible Values: true, false -Default: false -Effect: If set to true, no messages will be shown during the run of the - algorithm (but there will still be startup and summary messages). - -Option: refinement.applyAllFilter -Possible Values: true, false -Default: true -Effect: Specifies wether all equivalences should be used. 
- -Option: refinement.applyExistsFilter -Possible Values: true, false -Default: true -Effect: Specifies wether exists equivalences should be used. - -Option: refinement.useTooWeakList -Possible Values: true, false -Default: true -Effect: Specifies wether a too weak list should be used to reduce reasoner - requests. - -Option: refinement.useOverlyGeneralList -Possible Values: true, false -Default: true -Effect: Specifies wether an overly general list should be used to reduce - reasoner requests. - -Option: refinement.useShortConceptConstruction -Possible Values: true, false -Default: true -Effect: Specifies wether the algorithm should try to reduce a concept to a - known more general concept to reduce the number of necessary - subsumption checks for the reasoner. - -Option: refinement.useDIGMultiInstanceChecks -Possible Values: never, twoChecks, oneCheck -Default: twoChecks -Effect: The DIG protocol allows to send several queries to a DIG reasoner at - once. [This is automatically done for subsumption tests.] However, - for instance checks this has the disadvantage that it may not be - necessary to send all instance to the DIG reasoner if one of the - positive examples is not covered (meaning that the concept is - classified as too weak). - If the option is set to never, then each instance check is send - separately. - If the option is set to twoChecks, then first all positive examples will - be send in one query. If all of them are covered, i.e. the concept is - not classified as too weak, then all the negative examples are send in - one query. - If the option is set to oneCheck, then all examples will be send in one - query. - -Genetic Programming Specific ----------------------------- - -Option: gp.algorithmType -Possible Values: steadyState, generational -Default: steadyState -Effect: Uses either a steady state (population partly replaced) or generational - (population completely replaced) algorithm. 
- -Option: gp.elitism -Possible Values: true, false -Default: true -Effect: If true an the GP algorithm uses elitism, i.e. the best individual is - guarenteed to survive. - -Option: gp.numberOfIndividuals -Possible Values: 1-1000000 -Default: 1000 -Effect: Sets the number of individuals in the population. A higher value - improves classification, but is computationally more expensive. - -Option: gp.numberOfSelectedIndividuals -Possible Values: 1-1000000 -Default: 960 -Effect: Sets the number of individuals, which are selected for replacement in a - steady state GP algorithm. - -Option: gp.crossoverPercent -Possible Values: 0-100 -Default: 95 -Effect: The probability that offspring is produced using crossover (in contrast - to simply being copied over to the next generation). - -Option: gp.mutationPercent -Possible Values: 0-100 -Default: 3 -Effect: The probability that offspring is mutated after reproduction. - -Option: gp.hillClimbingPercent -Possible Values: 0-100 -Default: 0 -Effect: The probability that offspring is produced using the hill climbing - operator. - -Option: gp.refinementPercent -Possible Values: 0-100 -Default: 0 -Effect: The probability that offspring is produced using the genetic refinement - operator. - -Option: gp.postConvergenceGenerations -Possible Values: 10-1000 -Default: 50 -Effect: If the algorithm does not find a better solution for this number of - generations it stops. Added: trunk/doc/kbFileSyntax.txt =================================================================== --- trunk/doc/kbFileSyntax.txt (rev 0) +++ trunk/doc/kbFileSyntax.txt 2008-02-18 15:54:00 UTC (rev 604) @@ -0,0 +1,52 @@ +Some learning examples use background knowledge in *.kb files. These +files use a DL-Learner internal convenience syntax and can be converted +to OWL. + +Here are some examples for axioms in this syntax: + +person = (male OR female). +mother = (female AND EXISTS hasChild.TOP). +motherManyDaughters = (female AND >= 4 hasChild.female). 
+(mother AND father) SUBCLASSOF person. + +This is the EBNF description of the input language [slightly outdated]: + +Number = ["1"-"9"] (["0"-"9"])* +Id = ["a"-"z"] (["_","a"-"z","A"-"Z","0"-"9"])* +String: "\"" (~["\"","\\","\n","\r"])* "\"" +Instruction = ConfOption + | FunctionCall + | PosExample + | NegExample + | ABoxConcept + | ABoxRole + | Transitive + | Functional + | Symmetric + | Inverse + | Subrole + | TBoxEquiv + | TBoxSub +ConfOption = Id [ "." Id ] "=" ( Id | Number ) ";" +FunctionCall = Id "(" String ")" ";" +PosExample = "+" Id "(" Id ")" "." +NegExample = "-" Id "(" Id ")" "." +ABoxConcept = Concept "(" Id ")" "." +ABoxRole = Id "(" Id "," Id ")" "." +Transitive = "Transitive" "(" Id ")" "." +Functional = "Functional" "(" Id ")" "." +Symmetric = "Symmetric" "(" Id ")" "." +Inverse = "Inverse" "(" Id "," Id ")" "." +Subrole = "Subrole" "(" Id "," Id ")" "." +TBoxEquiv = Concept "=" Concept "." +TBoxSub = Concept ("SUBCLASSOF" | "SUB" ) Concept "." +Concept = "TOP" + | "BOTTOM" + | Id + | "(" Concept "AND" Concept ")" + | "(" Concept "OR" Concept ")" + | "EXISTS" Id "." Concept + | "ALL" Id "." Concept + | "NOT" Concept + | ">=" Number Id "." Concept + | "<=" Number Id "." Concept Deleted: trunk/doc/sparqlModule.txt =================================================================== --- trunk/doc/sparqlModule.txt 2008-02-18 15:35:03 UTC (rev 603) +++ trunk/doc/sparqlModule.txt 2008-02-18 15:54:00 UTC (rev 604) @@ -1,310 +0,0 @@ -SPARQL-module - -The SPARQL module is integrated into the DL-Learner and cannot be run alone any more. -You can just ignore the second part of this file, although it might describe some things -in more detail. 
- - -Here are some things that have to be included in the .conf file -to get a working example: - -loadJarFile("lib/modules/sparqlModule.jar"); -runPreprocessingModule("org.dllearner.modules.SparqlModule"); - -Options: -======== - -sparqlModule.numberOfRecursion = 2; -(range: 1-3) -Influences how deep the Ontology will be cut out -Default: not sure 2 at the moment -1: means only the selected individuals are loaded -2: adds classes of the individuals and properties -3: adds classes of properties of individuals and superclasses of classes - - -sparqlModule.filterMode = 0; -(range 0-2) -These are presets for SPARQL filters (see below for configurations) -0 only yago classes -1 only categories -2 skos and categories - - -With the next options SPARQL filters can be configured in more detail. -USE EITHER THIS or the filtermode above. - -sparqlModule.sparqlPredicateFilterList={"http://www.w3.org/2004/02/skos/core", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/property/wikipage-", - "http://www.w3.org/2002/07/owl#sameAs", - "http://dbpedia.org/property/reference"}; - -sparqlModule.sparqlObjectFilterList={"http://dbpedia.org/resource/Category:Articles_", - "http://dbpedia.org/resource/Category:Wikipedia_", - "http://xmlns.com/foaf/0.1/", - "http://dbpedia.org/resource/Category", - "http://dbpedia.org/resource/Template", - "http://upload.wikimedia.org/wikipedia/commons"}; - - -With the next option you can state namespaces that shall be typed as classes: -sparqlModule.classList={"http://dbpedia.org/class/yago", - "http://dbpedia.org/resource/Category:", - "http://dbpedia.org/resource/Template:", - "http://www.w3.org/2004/02/skos/core", - "http://dbpedia.org/class/"}; - -Hint: You can maybe export the ontology with the help of kaon2 in rdf format using: -export("file.owl"); -But it is untested. 
- - - -Attention******************************************* -below is a description of how to run the sparql module as a standalone application -this was disabled though and does NOT work at the moment - -Algorithm: -========= - -Start with: -java dbpedia.SparqlModule ListOfIndividuals - -ListOfIndividuals= Individual | ListOfIndividuals | {} - -The String[] args of the main method is taken as input at the moment. -This will be changed later, it is only for testing. - -Example: -java dbpedia.SparqlModule http://dbpedia.org/resource/Angela_Merkel http://dbpedia.org/resource/Gerhard_Schr%C3%B6der http://dbpedia.org/resource/Helmut_Kohl http://dbpedia.org/resource/Helmut_Schmidt http://dbpedia.org/resource/Ludwig_Erhard http://dbpedia.org/resource/Willy_Brandt - -See bottom for more examples - -*Sparql-Queries*** - -Example Sparql-Query see Options below for more info about FILTERS: - -SELECT * WHERE { -<http://dbpedia.org/resource/Angela_Merkel> ?predicate ?object. -FILTER( -(!isLiteral(?object)) -&&( !regex(str(?predicate), 'http://www.w3.org/2004/02/skos/core#') ) -&&( !regex(str(?predicate), 'http://xmlns.com/foaf/0.1/') ) -&&( !regex(str(?predicate), 'http://dbpedia.org/property/wikipage-') ) -&&( !regex(str(?predicate), 'http://www.w3.org/2002/07/owl#sameAs') ) -&&( !regex(str(?predicate), 'http://dbpedia.org/property/reference') ) -&&( !regex((?object), 'http://xmlns.com/foaf/0.1/') )).} - - -*Typing*** -DBpedia is not typed at all. 
-I replaced/added the following to comply to OWL-DL: - -Resources are sorted in three categories (Class, Property, Instance) -(implemented as HashSets) according to their uris -Properties are typed as Properties -Classes see below Options -Instances is what is left - -*Output*** -The Ontology is written to a file System.currentMillisec().nt - - - -*Cache*** -the cache remembers: a timestamp, the original sparql-query, the result -key is the subject http://dbpedia.org/resource/Angela_Merkel which is first urlencoded -and so serves as the hash for the filename. -Cache validates if timestamp too old and Sparql-Query the same -before returning the SPARQL xml-result - - - - - -Options: -======== - -All Options cannot be altered right now, except in the source. -This will be later moved to the constructor. -Default is always used. -Some are marked with "NYI", these are Not Yet Implemented and are just ideas I had. - -Main-Algorithm: ---------------- - -*NumberOfRecursion (int)*** - -how deep will the Ontology be cut out - -Default: ??? not sure 2 at the moment - -*ClassRepeat NYI (int)*** - -Recursion can be increased, if the Object is a class. -This will result in deeper, richer hierarchies - - -Server: ------- - -there are no options planned right now hardcoded with: -dbpedia.openlinksw.com:8890 - - -Sparql-queries: ---------------- - -*SparqlFilterPredicate (String[])*** - -removes all rows with the specified String in the PREDICATE from the ResultSet -(integrated in SPARQL query with FILTER !regex) - -Default: -"http://www.w3.org/2004/02/skos/core#" // conflict: no properties between classes allowed -"http://xmlns.com/foaf/0.1/" //foaf is not well defined/unnecessary -"http://dbpedia.org/property/wikipage-" // foreign wikis -fr -es, etc. 
-"http://www.w3.org/2002/07/owl#sameAs" // contradicts with owl-dl maybe changed later so it will be replaced by owl:equivalentClass for Classes -"http://dbpedia.org/property/reference" //links to homepages - -*SparqlFilterObject (String[])*** -(integrated in SPARQL query with FILTER !regex) - -removes all rows with the specified String in the OBJECT from the ResultSet - -Default: -http://xmlns.com/foaf/0.1/ //foaf is not well defined/unnecessary - -*useLiterals (boolean)*** -(integrated in SPARQL query with FILTER !isLiteral) - -removes all rows with literals as objects from the ResultSet - -Default: false - - - -Type Information: ------------------ - -options that are used to type resources to retain OWL-DL - -*IsClass (String[])*** - -specifies URI's that can be considered classes (and therefore will be typed as classes) - -Default: -"http://dbpedia.org/resource/Category:" //dbpedia categories -"http://www.w3.org/2004/02/skos/core" // concept -"http://dbpedia.org/class/yago" // yago classes -"http://dbpedia.org/class/" // "yago" is missing in some of the Datasets from DBpedia, -I don't actually know if this is a mistake or on purpose - - -Cache: ------- - - -*freshdays (int)*** - -Determines how many days a cached query stays valid - -Default: 15 days - -*path (String)*** - -path of the cache-folder - -Default: cache - - - -Syntactic Sugar NYI ----------------- -Instead of choosing example individuals to put into main, -it could be nice to choose 2 Categories. -The module automatically downloads the individals for both classes itself, -removes the category from the definiton and then runs the DL-Learner. 
- - - - - -Examples: -========= - -German Leaders: - -Input: -http://dbpedia.org/resource/Adolf_Hitler http://dbpedia.org/resource/Prince_Chlodwig_zu_HohenloheSchillingsf%C3%BCrst http://dbpedia.org/resource/Prince_Maximilian_of_Baden http://dbpedia.org/resource/Franz_von_Papen http://dbpedia.org/resource/Joseph_Goebbels http://dbpedia.org/resource/Gerhard_Schr%C3%B6der http://dbpedia.org/resource/Angela_Merkel http://dbpedia.org/resource/Helmut_Kohl http://dbpedia.org/resource/Helmut_Schmidt http://dbpedia.org/resource/Ludwig_Erhard http://dbpedia.org/resource/Willy_Brandt - -conf: - -+test("http://dbpedia.org/resource/Adolf_Hitler"). -+test("http://dbpedia.org/resource/Prince_Chlodwig_zu_Hohenlohe-Schillingsf%C3%BCrst"). -+test("http://dbpedia.org/resource/Prince_Maximilian_of_Baden"). -+test("http://dbpedia.org/resource/Franz_von_Papen"). -+test("http://dbpedia.org/resource/Joseph_Goebbels"). -+test("http://dbpedia.org/resource/Gerhard_Schr%C3%B6der"). -+test("http://dbpedia.org/resource/Angela_Merkel"). -+test("http://dbpedia.org/resource/Helmut_Kohl"). -+test("http://dbpedia.org/resource/Helmut_Schmidt"). -+test("http://dbpedia.org/resource/Ludwig_Erhard"). -+test("http://dbpedia.org/resource/Willy_Brandt"). - - -Greek Philosophers: - -Input - - http://dbpedia.org/resource/Socrates http://dbpedia.org/resource/Plato http://dbpedia.org/resource/Pythagoras http://dbpedia.org/resource/Zeno_of_Elea http://dbpedia.org/resource/Democritus http://dbpedia.org/resource/Theophrastus http://dbpedia.org/resource/Anaxagoras http://dbpedia.org/resource/Alexander_the_Great http://dbpedia.org/resource/Hephaestion http://dbpedia.org/resource/Diotima http://dbpedia.org/resource/Nicomachus http://dbpedia.org/resource/Harpalus http://dbpedia.org/resource/Menaechmus - -Conf - -+test("http://dbpedia.org/resource/Socrates"). -+test("http://dbpedia.org/resource/Plato"). -+test("http://dbpedia.org/resource/Pythagoras"). - -+test("http://dbpedia.org/resource/Zeno_of_Elea"). 
-//was a pre-Socratic Greek philosopher -+test("http://dbpedia.org/resource/Democritus"). -//was a pre-Socratic Greek philosopher -//+test("http://dbpedia.org/resource/Theophrastus"). -//a native of Eressos in Lesbos, was the successor of Aristotle in the Peripatetic school. -+test("http://dbpedia.org/resource/Anaxagoras"). -//was a pre-Socratic Greek philosopher. - --test("http://dbpedia.org/resource/Alexander_the_Great"). --test("http://dbpedia.org/resource/Hephaestion"). -//makedonischer Adeliger, der engste Freund, General, Leibw\xE4chter und vielleicht auch Geliebter Alexanders des Gro\xDFen. --test("http://dbpedia.org/resource/Diotima"). -//ist eine literarische Figur Platons, eine Seherin aus Mantin\xE4a, --test("http://dbpedia.org/resource/Nicomachus"). -//Nicomachus sp\xE4tantiker r\xF6mischer Schriftsteller und Politiker --test("http://dbpedia.org/resource/Harpalus"). -//Harpalus was an aristocrat of Macedon and boyhood friend of Alexander the Great --test("http://dbpedia.org/resource/Menaechmus"). -//was a Greek mathematician and geometer born in Alopeconnesus - - - - - - - - - - - - - - - - - - - - - - Modified: trunk/src/dl-learner/org/dllearner/Info.java =================================================================== --- trunk/src/dl-learner/org/dllearner/Info.java 2008-02-18 15:35:03 UTC (rev 603) +++ trunk/src/dl-learner/org/dllearner/Info.java 2008-02-18 15:54:00 UTC (rev 604) @@ -3,6 +3,6 @@ package org.dllearner; public class Info { - public static final String build = "2008-02-06"; + public static final String build = "2008-02-18"; } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-02-22 15:16:40
|
Revision: 626 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=626&view=rev Author: jenslehmann Date: 2008-02-22 07:16:02 -0800 (Fri, 22 Feb 2008) Log Message: ----------- implemented lightning fast instance check algorithm Modified Paths: -------------- trunk/lib/components.ini trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java trunk/src/dl-learner/org/dllearner/algorithms/refinement/ROLearner.java trunk/src/dl-learner/org/dllearner/cli/Start.java trunk/src/dl-learner/org/dllearner/core/ReasoningMethodUnsupportedException.java trunk/src/dl-learner/org/dllearner/core/owl/NamedClass.java trunk/src/dl-learner/org/dllearner/core/owl/ObjectProperty.java trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java trunk/src/dl-learner/org/dllearner/parser/kb.jj trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java trunk/src/dl-learner/org/dllearner/reasoning/ReasonerType.java trunk/src/dl-learner/org/dllearner/test/junit/ReasonerTests.java Modified: trunk/lib/components.ini =================================================================== --- trunk/lib/components.ini 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/lib/components.ini 2008-02-22 15:16:02 UTC (rev 626) @@ -8,6 +8,7 @@ org.dllearner.reasoning.OWLAPIReasoner org.dllearner.reasoning.DIGReasoner org.dllearner.reasoning.FastRetrievalReasoner +org.dllearner.reasoning.FastInstanceChecker # learning problems org.dllearner.learningproblems.PosNegDefinitionLP org.dllearner.learningproblems.PosNegInclusionLP Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-02-22 
15:16:02 UTC (rev 626) @@ -621,6 +621,8 @@ // System.out.println("properness max recursion depth: " + maxRecDepth); // System.out.println("max. number of one-step refinements: " + maxNrOfRefinements); // System.out.println("max. number of children of a node: " + maxNrOfChildren); + System.out.println("subsumption time: " + Helper.prettyPrintNanoSeconds(rs.getSubsumptionReasoningTimeNs())); + System.out.println("instance check time: " + Helper.prettyPrintNanoSeconds(rs.getInstanceCheckReasoningTimeNs())); } if(computeBenchmarkInformation) { Modified: trunk/src/dl-learner/org/dllearner/algorithms/refinement/ROLearner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refinement/ROLearner.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/algorithms/refinement/ROLearner.java 2008-02-22 15:16:02 UTC (rev 626) @@ -900,6 +900,8 @@ // System.out.println("properness max recursion depth: " + maxRecDepth); // System.out.println("max. number of one-step refinements: " + maxNrOfRefinements); // System.out.println("max. 
number of children of a node: " + maxNrOfChildren); + logger.debug("subsumption time: " + Helper.prettyPrintNanoSeconds(rs.getSubsumptionReasoningTimeNs())); + logger.debug("instance check time: " + Helper.prettyPrintNanoSeconds(rs.getInstanceCheckReasoningTimeNs())); } if(showBenchmarkInformation) { Modified: trunk/src/dl-learner/org/dllearner/cli/Start.java =================================================================== --- trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/cli/Start.java 2008-02-22 15:16:02 UTC (rev 626) @@ -78,6 +78,7 @@ import org.dllearner.parser.ParseException; import org.dllearner.parser.TokenMgrError; import org.dllearner.reasoning.DIGReasoner; +import org.dllearner.reasoning.FastInstanceChecker; import org.dllearner.reasoning.FastRetrievalReasoner; import org.dllearner.reasoning.OWLAPIReasoner; import org.dllearner.utilities.ConceptComparator; @@ -751,6 +752,8 @@ reasonerClass = OWLAPIReasoner.class; else if (reasonerOption.getStringValue().equals("fastRetrieval")) reasonerClass = FastRetrievalReasoner.class; + else if (reasonerOption.getStringValue().equals("fastInstanceChecker")) + reasonerClass = FastInstanceChecker.class; else { handleError("Unknown value " + reasonerOption.getStringValue() + " for option \"reasoner\"."); Modified: trunk/src/dl-learner/org/dllearner/core/ReasoningMethodUnsupportedException.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/ReasoningMethodUnsupportedException.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/core/ReasoningMethodUnsupportedException.java 2008-02-22 15:16:02 UTC (rev 626) @@ -1,7 +1,44 @@ +/** + * Copyright (C) 2007, Jens Lehmann + * + * This file is part of DL-Learner. 
+ * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ package org.dllearner.core; +/** + * Exception indicating that a reasoner implementation cannot support + * the requested operation. Either the operation itself is not implemented + * or does not support certain features, e.g. a reasoner could support + * instance checks but not if the class description contains datatype + * constructs. 
+ * + * @author Jens Lehmann + * + */ public class ReasoningMethodUnsupportedException extends Exception { private static final long serialVersionUID = -7045236443032695475L; + + public ReasoningMethodUnsupportedException() { + super(); + } + + public ReasoningMethodUnsupportedException(String message) { + super(message); + } } Modified: trunk/src/dl-learner/org/dllearner/core/owl/NamedClass.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/owl/NamedClass.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/core/owl/NamedClass.java 2008-02-22 15:16:02 UTC (rev 626) @@ -30,7 +30,7 @@ * @author Jens Lehmann * */ -public class NamedClass extends Description implements NamedKBElement { +public class NamedClass extends Description implements NamedKBElement, Comparable<NamedClass> { String name; @@ -79,4 +79,8 @@ public String toManchesterSyntaxString(String baseURI, Map<String, String> prefixes) { return Helper.getAbbreviatedString(name, baseURI, prefixes); } + + public int compareTo(NamedClass o) { + return name.compareTo(o.name); + } } Modified: trunk/src/dl-learner/org/dllearner/core/owl/ObjectProperty.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/owl/ObjectProperty.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/core/owl/ObjectProperty.java 2008-02-22 15:16:02 UTC (rev 626) @@ -30,7 +30,7 @@ * @author Jens Lehmann * */ -public class ObjectProperty extends ObjectPropertyExpression implements Property, NamedKBElement { +public class ObjectProperty extends ObjectPropertyExpression implements Property, NamedKBElement, Comparable<ObjectProperty> { public ObjectProperty(String name) { super(name); @@ -51,5 +51,9 @@ public void accept(KBElementVisitor visitor) { visitor.visit(this); + } + + public int compareTo(ObjectProperty o) { + return name.compareTo(o.name); } } Modified: 
trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java =================================================================== --- trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java 2008-02-22 15:16:02 UTC (rev 626) @@ -169,6 +169,8 @@ File confTrainFile = new File("examples/carcinogenesis/train.conf"); Files.clearFile(confTrainFile); String confHeader = "import(\"pte.owl\");\n\n"; + confHeader += "refinement.writeSearchTree = true;"; + confHeader += "refinement.searchTreeFile = \"log/carcinogenesis/searchTree.log\""; confHeader += "reasoner = owlAPI;\n"; Files.appendFile(confTrainFile, confHeader); Modified: trunk/src/dl-learner/org/dllearner/parser/kb.jj =================================================================== --- trunk/src/dl-learner/org/dllearner/parser/kb.jj 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/parser/kb.jj 2008-02-22 15:16:02 UTC (rev 626) @@ -36,7 +36,7 @@ public class KBParser { - public static final String internalNamespace = "http://localhost/foo#"; + public static String internalNamespace = "http://localhost/foo#"; // method to give all internal stuff an URI (not necessary for DLs, but for OWL ontologies // and it should be possible to represent the internal KB as OWL ontology) Modified: trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-02-22 15:16:02 UTC (rev 626) @@ -19,50 +19,96 @@ */ package org.dllearner.reasoning; -import java.util.HashMap; +import java.io.File; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; +import java.util.TreeMap; +import org.apache.log4j.Logger; 
import org.dllearner.core.ComponentInitException; +import org.dllearner.core.ComponentManager; import org.dllearner.core.KnowledgeSource; import org.dllearner.core.ReasonerComponent; +import org.dllearner.core.ReasoningMethodUnsupportedException; import org.dllearner.core.ReasoningService; import org.dllearner.core.config.ConfigEntry; import org.dllearner.core.config.InvalidConfigOptionValueException; -import org.dllearner.core.owl.FlatABox; +import org.dllearner.core.owl.Description; import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.Intersection; import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.Negation; +import org.dllearner.core.owl.Nothing; +import org.dllearner.core.owl.ObjectAllRestriction; import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyExpression; +import org.dllearner.core.owl.ObjectPropertyHierarchy; +import org.dllearner.core.owl.ObjectSomeRestriction; +import org.dllearner.core.owl.SubsumptionHierarchy; +import org.dllearner.core.owl.Thing; +import org.dllearner.core.owl.Union; +import org.dllearner.kb.OWLFile; +import org.dllearner.parser.KBParser; +import org.dllearner.parser.ParseException; /** * Reasoner for fast instance checks. It works by completely dematerialising the knowledge - * base to speed up later reasoning requests. It + * base to speed up later reasoning requests. It then continues by only considering one + * model of the knowledge base (TODO: more explanation), which is neither correct nor + * complete, but sufficient in many cases. A big advantage of the algorithm is that it + * does not need even need to perform any set modifications (union, intersection, difference), + * so it avoids any object creation, which makes it very fast compared to standard + * reasoners (TODO: maybe add some benchmarks once it is implemented). * + * Note: This algorithm works only on concepts in negation normal form! 
+ * * @author Jens Lehmann * */ public class FastInstanceChecker extends ReasonerComponent { + private static Logger logger = Logger + .getLogger(FastInstanceChecker.class); + private Set<NamedClass> atomicConcepts; private Set<ObjectProperty> atomicRoles; private SortedSet<Individual> individuals; private ReasoningService rs; private ReasonerComponent rc; + private Set<KnowledgeSource> sources; + // we use sorted sets (map indices) here, because they have only log(n) + // complexity for checking whether an element is contained in them // instances of classes - public Map<NamedClass,SortedSet<Individual>> classInstancesPos = new HashMap<NamedClass,SortedSet<Individual>>(); - public Map<NamedClass,SortedSet<Individual>> classInstancesNeg = new HashMap<NamedClass,SortedSet<Individual>>(); - + private Map<NamedClass,SortedSet<Individual>> classInstancesPos = new TreeMap<NamedClass,SortedSet<Individual>>(); + private Map<NamedClass,SortedSet<Individual>> classInstancesNeg = new TreeMap<NamedClass,SortedSet<Individual>>(); // object property mappings - public Map<ObjectProperty,Map<Individual,SortedSet<Individual>>> opPos = new HashMap<ObjectProperty,Map<Individual,SortedSet<Individual>>>(); - public Map<ObjectProperty,Map<Individual,SortedSet<Individual>>> opNeg = new HashMap<ObjectProperty,Map<Individual,SortedSet<Individual>>>(); - + private Map<ObjectProperty,Map<Individual,SortedSet<Individual>>> opPos = new TreeMap<ObjectProperty,Map<Individual,SortedSet<Individual>>>(); + // TODO: datatype properties public FastInstanceChecker(Set<KnowledgeSource> sources) { - rc = new OWLAPIReasoner(sources); + this.sources = sources; + } + + + /* (non-Javadoc) + * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.config.ConfigEntry) + */ + @Override + public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { + + } + + /* (non-Javadoc) + * @see org.dllearner.core.Component#init() + */ + @Override + public void init() 
throws ComponentInitException { + rc = new DIGReasoner(sources); try { rc.init(); } catch (ComponentInitException e1) { @@ -77,50 +123,97 @@ // be done (maybe this can be merge again with the FastRetrievalReasoner later) long dematStartTime = System.currentTimeMillis(); - FlatABox aBox = new FlatABox(); for (NamedClass atomicConcept : rs.getAtomicConcepts()) { - // aBox.atomicConceptsPos.put(atomicConcept.getName(), getStringSet(rs - // .retrieval(atomicConcept))); -// Negation negatedAtomicConcept = new Negation(atomicConcept); - // aBox.atomicConceptsNeg.put(atomicConcept.getName(), getStringSet(rs - // .retrieval(negatedAtomicConcept))); - aBox.concepts.add(atomicConcept.getName()); + classInstancesPos.put(atomicConcept, rs.retrieval(atomicConcept)); + Negation negatedAtomicConcept = new Negation(atomicConcept); + classInstancesNeg.put(atomicConcept, rs.retrieval(negatedAtomicConcept)); } for (ObjectProperty atomicRole : rs.getAtomicRoles()) { - // aBox.rolesPos.put(atomicRole.getName(), getStringMap(rs.getRoleMembers(atomicRole))); - aBox.roles.add(atomicRole.getName()); + opPos.put(atomicRole, rs.getRoleMembers(atomicRole)); } - // aBox.domain = getStringSet(rs.getIndividuals()); - // aBox.top = aBox.domain; - - // System.out.println(aBox); - long dematDuration = System.currentTimeMillis() - dematStartTime; - System.out.println("OK (" + dematDuration + " ms)"); + logger.info("TBox dematerialised in " + dematDuration + " ms"); - } - - - /* (non-Javadoc) - * @see org.dllearner.core.Component#applyConfigEntry(org.dllearner.core.config.ConfigEntry) - */ - @Override - public <T> void applyConfigEntry(ConfigEntry<T> entry) throws InvalidConfigOptionValueException { - // TODO Auto-generated method stub - } - /* (non-Javadoc) - * @see org.dllearner.core.Component#init() - */ @Override - public void init() throws ComponentInitException { - // TODO Auto-generated method stub - - } - + public boolean instanceCheck(Description description, Individual individual) throws 
ReasoningMethodUnsupportedException { + if(description instanceof NamedClass) { + return classInstancesPos.get((NamedClass)description).contains(individual); + } else if(description instanceof Negation) { + Description child = description.getChild(0); + if(child instanceof NamedClass) { + return classInstancesNeg.get((NamedClass)child).contains(individual); + } else { + throw new ReasoningMethodUnsupportedException("Instance check for description " + description + " unsupported. Description needs to be in negation normal form."); + } + } else if(description instanceof Thing) { + return true; + } else if(description instanceof Nothing) { + return false; + } else if(description instanceof Union) { + // if the individual is instance of any of the subdescription of + // the union, we return true + List<Description> children = description.getChildren(); + for(Description child : children) { + if(instanceCheck(child, individual)) + return true; + } + return false; + } else if(description instanceof Intersection) { + // if the individual is instance of all of the subdescription of + // the union, we return true + List<Description> children = description.getChildren(); + for(Description child : children) { + if(!instanceCheck(child, individual)) + return false; + } + return true; + } else if(description instanceof ObjectSomeRestriction) { + ObjectPropertyExpression ope = ((ObjectSomeRestriction)description).getRole(); + if(!(ope instanceof ObjectProperty)) + throw new ReasoningMethodUnsupportedException("Instance check for description " + description + " unsupported. 
Inverse object properties not supported."); + ObjectProperty op = (ObjectProperty) ope; + Description child = description.getChild(0); + Map<Individual,SortedSet<Individual>> mapping = opPos.get(op);; + if(mapping == null) { + logger.warn("Instance check of a description with an undefinied property (" + op + ")."); + return false; + } + SortedSet<Individual> roleFillers = opPos.get(op).get(individual); + if(roleFillers == null) + return false; + for(Individual roleFiller : roleFillers) { + if(instanceCheck(child, roleFiller)) + return true; + } + return false; + } else if(description instanceof ObjectAllRestriction) { + ObjectPropertyExpression ope = ((ObjectAllRestriction)description).getRole(); + if(!(ope instanceof ObjectProperty)) + throw new ReasoningMethodUnsupportedException("Instance check for description " + description + " unsupported. Inverse object properties not supported."); + ObjectProperty op = (ObjectProperty) ope; + Description child = description.getChild(0); + Map<Individual,SortedSet<Individual>> mapping = opPos.get(op);; + if(mapping == null) { + logger.warn("Instance check of a description with an undefinied property (" + op + ")."); + return true; + } + SortedSet<Individual> roleFillers = opPos.get(op).get(individual); + if(roleFillers == null) + return true; + for(Individual roleFiller : roleFillers) { + if(!instanceCheck(child, roleFiller)) + return false; + } + return true; + } + + throw new ReasoningMethodUnsupportedException("Instance check for description " + description + " unsupported."); + } + /* (non-Javadoc) * @see org.dllearner.core.Reasoner#getAtomicConcepts() */ @@ -146,24 +239,62 @@ * @see org.dllearner.core.Reasoner#getReasonerType() */ public ReasonerType getReasonerType() { - // TODO Auto-generated method stub - return null; + return ReasonerType.FAST_INSTANCE_CHECKER; } /* (non-Javadoc) * @see org.dllearner.core.Reasoner#prepareSubsumptionHierarchy(java.util.Set) */ public void prepareSubsumptionHierarchy(Set<NamedClass> 
allowedConcepts) { - // TODO Auto-generated method stub + rs.prepareSubsumptionHierarchy(); + } + @Override + public SubsumptionHierarchy getSubsumptionHierarchy() { + return rs.getSubsumptionHierarchy(); + } + + @Override + public void prepareRoleHierarchy(Set<ObjectProperty> allowedRoles) { + rs.prepareRoleHierarchy(allowedRoles); + } + + @Override + public ObjectPropertyHierarchy getRoleHierarchy() { + return rs.getRoleHierarchy(); } - + + @Override + public boolean subsumes(Description superConcept, Description subConcept) { +// Negation neg = new Negation(subConcept); +// Intersection c = new Intersection(neg,superConcept); +// return fastRetrieval.calculateSets(c).getPosSet().isEmpty(); + return rs.subsumes(superConcept, subConcept); + } + /** - * @param args + * Test method for fast instance checker. + * @param args No arguments supported. + * @throws ComponentInitException + * @throws ParseException + * @throws ReasoningMethodUnsupportedException */ - public static void main(String[] args) { - // TODO Auto-generated method stub - + public static void main(String[] args) throws ComponentInitException, ParseException, ReasoningMethodUnsupportedException { + ComponentManager cm = ComponentManager.getInstance(); + OWLFile owl = cm.knowledgeSource(OWLFile.class); + String owlFile = new File("examples/family/father.owl").toURI().toString(); + cm.applyConfigEntry(owl, "url", owlFile); + owl.init(); + ReasonerComponent reasoner = cm.reasoner(FastInstanceChecker.class, owl); + cm.reasoningService(reasoner); + reasoner.init(); + + KBParser.internalNamespace = "http://example.com/father#"; + String query = "(male AND EXISTS hasChild.TOP)"; + Description d = KBParser.parseConcept(query); + System.out.println(d); + Individual i = new Individual("http://example.com/father#markus"); + System.out.println(reasoner.instanceCheck(d, i)); } } Modified: trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/reasoning/FastRetrievalReasoner.java 2008-02-22 15:16:02 UTC (rev 626) @@ -114,9 +114,10 @@ // C \sqsubseteq D is rewritten to a retrieval for \not C \sqcap D @Override public boolean subsumes(Description superConcept, Description subConcept) { - Negation neg = new Negation(subConcept); - Intersection c = new Intersection(neg,superConcept); - return fastRetrieval.calculateSets(c).getPosSet().isEmpty(); +// Negation neg = new Negation(subConcept); +// Intersection c = new Intersection(neg,superConcept); +// return fastRetrieval.calculateSets(c).getPosSet().isEmpty(); + return rs.subsumes(superConcept, subConcept); } @Override Modified: trunk/src/dl-learner/org/dllearner/reasoning/ReasonerType.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/ReasonerType.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/reasoning/ReasonerType.java 2008-02-22 15:16:02 UTC (rev 626) @@ -4,5 +4,5 @@ package org.dllearner.reasoning; public enum ReasonerType { - KAON2, DIG, FAST_RETRIEVAL, FACT, PELLET + KAON2, DIG, FAST_RETRIEVAL, FACT, PELLET, FAST_INSTANCE_CHECKER } \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/test/junit/ReasonerTests.java =================================================================== --- trunk/src/dl-learner/org/dllearner/test/junit/ReasonerTests.java 2008-02-22 12:50:11 UTC (rev 625) +++ trunk/src/dl-learner/org/dllearner/test/junit/ReasonerTests.java 2008-02-22 15:16:02 UTC (rev 626) @@ -88,7 +88,12 @@ for (Class<? 
extends ReasonerComponent> reasonerClass : reasonerClasses) { ReasonerComponent reasoner = cm.reasoner(reasonerClass, ks); reasoner.init(); - boolean result = reasoner.instanceCheck(d, i); +// long startTime = System.nanoTime(); + boolean result = false; +// for(int n=0; n<10000; n++) { + result = reasoner.instanceCheck(d, i); +// } +// long time = System.nanoTime() - startTime; logger.debug("instance check: " + reasoner + " " + d + " " + i + " " + result); assertTrue(result); } This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-03-03 19:40:24
|
Revision: 677 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=677&view=rev Author: jenslehmann Date: 2008-03-03 11:38:48 -0800 (Mon, 03 Mar 2008) Log Message: ----------- several bugfixes and first succesful run of new refinement operator on arch example Modified Paths: -------------- trunk/examples/arch/arch.conf trunk/examples/arch/arch.kb trunk/examples/arch/arch.owl trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java Modified: trunk/examples/arch/arch.conf =================================================================== --- trunk/examples/arch/arch.conf 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/examples/arch/arch.conf 2008-03-03 19:38:48 UTC (rev 677) @@ -16,13 +16,12 @@ * Copyright (C) 2007, Sebastian Hellmann */ +algorithm = refexamples; +reasoner = fastInstanceChecker; -//export("arch.owl"); - -/** background knowledge **/ +// export("arch.owl"); import("arch.kb"); - /** examples **/ +c1 +c4 Modified: trunk/examples/arch/arch.kb =================================================================== --- trunk/examples/arch/arch.kb 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/examples/arch/arch.kb 2008-03-03 19:38:48 UTC (rev 677) @@ -1,6 +1,11 @@ parallelpipe = (brick OR wedge). -freeStandingPillar = (pillar AND NOT EXISTS touches.TOP ) . +freeStandingPillar = (pillar AND NOT EXISTS touches.TOP ). +// disjoint classes +(parallelpipe AND pillar) = BOTTOM. +(construction AND pillar) = BOTTOM. +(parallelpipe AND construction) = BOTTOM. + construction(c1). construction(c2). construction(c3). @@ -24,6 +29,8 @@ wedge(d). wedge(e). +OPDOMAIN(hasPillar) = construction. +OPRANGE(hasPillar) = pillar. hasPillar(c1,p1). hasPillar(c1,p2). hasPillar(c2,p3). @@ -34,12 +41,16 @@ hasPillar(c4,p8). 
hasPillar(c5,p9). +OPDOMAIN(hasParallelpipe) = construction. +OPRANGE(hasParallelpipe) = parallelpipe. hasParallelpipe(c1,a). hasParallelpipe(c2,b). hasParallelpipe(c3,c). hasParallelpipe(c4,d). hasParallelpipe(c5,e). +OPDOMAIN(supports) = pillar. +OPRANGE(supports) = parallelpipe. supports(p1,a). supports(p2,a). supports(p5,c). @@ -48,11 +59,15 @@ supports(p8,d). supports(p9,e). +OPDOMAIN(leftof) = pillar. +OPRANGE(leftof) = pillar. leftof(p1,p2). leftof(p3,p4). leftof(p5,p6). leftof(p7,p8). +OPDOMAIN(touches) = pillar. +OPRANGE(touches) = pillar. touches(p5,p6). freeStandingPillar(p1). Modified: trunk/examples/arch/arch.owl =================================================================== --- trunk/examples/arch/arch.owl 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/examples/arch/arch.owl 2008-03-03 19:38:48 UTC (rev 677) @@ -1,115 +1,362 @@ -<?xml version="1.0"?> -<!DOCTYPE rdf:RDF [ - <!ENTITY owl 'http://www.w3.org/2002/07/owl#'> -]> -<rdf:RDF xml:base="http://localhost/foo" -xmlns:a="http://localhost/foo#" -xmlns:owl="http://www.w3.org/2002/07/owl#" -xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" -xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" -xmlns:prefix1="http://localhost/foo#"> - <owl:Ontology rdf:about=""/> - <owl:Class rdf:ID="freeStandingPillar"> - <owl:equivalentClass> - <owl:Class> - <owl:intersectionOf rdf:parseType="Collection"> - <owl:Class rdf:about="#pillar"/> - <owl:Class> - <owl:complementOf> - <owl:Restriction> - <owl:onProperty rdf:resource="#touches"/> - <owl:someValuesFrom rdf:resource="&owl;Thing"/> - </owl:Restriction> - </owl:complementOf> - </owl:Class> - </owl:intersectionOf> - </owl:Class> - </owl:equivalentClass> - </owl:Class> - <owl:Class rdf:ID="parallelpipe"> - <owl:equivalentClass> - <owl:Class> - <owl:unionOf rdf:parseType="Collection"> - <owl:Class rdf:about="#brick"/> - <owl:Class rdf:about="#wedge"/> - </owl:unionOf> - </owl:Class> - </owl:equivalentClass> - </owl:Class> - <prefix1:brick rdf:ID="a"/> - 
<prefix1:brick rdf:ID="b"/> - <prefix1:brick rdf:ID="c"/> - <prefix1:construction rdf:ID="c1"> - <prefix1:hasParallelpipe rdf:resource="#a"/> - <prefix1:hasPillar rdf:resource="#p1"/> - <prefix1:hasPillar rdf:resource="#p2"/> - </prefix1:construction> - <prefix1:construction rdf:ID="c2"> - <prefix1:hasParallelpipe rdf:resource="#b"/> - <prefix1:hasPillar rdf:resource="#p3"/> - <prefix1:hasPillar rdf:resource="#p4"/> - </prefix1:construction> - <prefix1:construction rdf:ID="c3"> - <prefix1:hasParallelpipe rdf:resource="#c"/> - <prefix1:hasPillar rdf:resource="#p5"/> - <prefix1:hasPillar rdf:resource="#p6"/> - </prefix1:construction> - <prefix1:construction rdf:ID="c4"> - <prefix1:hasParallelpipe rdf:resource="#d"/> - <prefix1:hasPillar rdf:resource="#p7"/> - <prefix1:hasPillar rdf:resource="#p8"/> - </prefix1:construction> - <prefix1:construction rdf:ID="c5"> - <prefix1:hasParallelpipe rdf:resource="#e"/> - <prefix1:hasPillar rdf:resource="#p9"/> - </prefix1:construction> - <prefix1:wedge rdf:ID="d"/> - <prefix1:wedge rdf:ID="e"/> - <prefix1:freeStandingPillar rdf:ID="p1"> - <rdf:type rdf:resource="#pillar"/> - <prefix1:leftof rdf:resource="#p2"/> - <prefix1:supports rdf:resource="#a"/> - </prefix1:freeStandingPillar> - <prefix1:freeStandingPillar rdf:ID="p2"> - <rdf:type rdf:resource="#pillar"/> - <prefix1:supports rdf:resource="#a"/> - </prefix1:freeStandingPillar> - <prefix1:freeStandingPillar rdf:ID="p3"> - <rdf:type rdf:resource="#pillar"/> - <prefix1:leftof rdf:resource="#p4"/> - </prefix1:freeStandingPillar> - <prefix1:freeStandingPillar rdf:ID="p4"> - <rdf:type rdf:resource="#pillar"/> - </prefix1:freeStandingPillar> - <prefix1:pillar rdf:ID="p5"> - <prefix1:leftof rdf:resource="#p6"/> - <prefix1:supports rdf:resource="#c"/> - <prefix1:touches rdf:resource="#p6"/> - </prefix1:pillar> - <prefix1:pillar rdf:ID="p6"> - <prefix1:supports rdf:resource="#c"/> - </prefix1:pillar> - <prefix1:freeStandingPillar rdf:ID="p7"> - <rdf:type rdf:resource="#pillar"/> - 
<prefix1:leftof rdf:resource="#p8"/> - <prefix1:supports rdf:resource="#d"/> - </prefix1:freeStandingPillar> - <prefix1:freeStandingPillar rdf:ID="p8"> - <rdf:type rdf:resource="#pillar"/> - <prefix1:supports rdf:resource="#d"/> - </prefix1:freeStandingPillar> - <prefix1:freeStandingPillar rdf:ID="p9"> - <rdf:type rdf:resource="#pillar"/> - <prefix1:supports rdf:resource="#e"/> - </prefix1:freeStandingPillar> - <owl:Class rdf:ID="brick"/> - <owl:Class rdf:ID="construction"/> - <owl:Class rdf:ID="pillar"/> - <owl:Class rdf:ID="wedge"/> - <owl:Class rdf:about="http://www.w3.org/2002/07/owl#Thing"/> - <owl:ObjectProperty rdf:ID="hasParallelpipe"/> - <owl:ObjectProperty rdf:ID="hasPillar"/> - <owl:ObjectProperty rdf:ID="leftof"/> - <owl:ObjectProperty rdf:ID="supports"/> - <owl:ObjectProperty rdf:ID="touches"/> -</rdf:RDF> +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY foo "http://localhost/foo#" > + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY owl11 "http://www.w3.org/2006/12/owl11#" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl11xml "http://www.w3.org/2006/12/owl11-xml#" > + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > +]> + + +<rdf:RDF xmlns="http://example.com#" + xml:base="http://example.com" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl11="http://www.w3.org/2006/12/owl11#" + xmlns:owl11xml="http://www.w3.org/2006/12/owl11-xml#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:foo="http://localhost/foo#"> + <owl:Ontology rdf:about=""/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Object Properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://localhost/foo#hasParallelpipe 
--> + + <owl:ObjectProperty rdf:about="&foo;hasParallelpipe"> + <rdfs:range rdf:resource="&foo;parallelpipe"/> + <rdfs:domain rdf:resource="&foo;construction"/> + </owl:ObjectProperty> + + + + <!-- http://localhost/foo#hasPillar --> + + <owl:ObjectProperty rdf:about="&foo;hasPillar"> + <rdfs:domain rdf:resource="&foo;construction"/> + <rdfs:range rdf:resource="&foo;pillar"/> + </owl:ObjectProperty> + + + + <!-- http://localhost/foo#leftof --> + + <owl:ObjectProperty rdf:about="&foo;leftof"> + <rdfs:domain rdf:resource="&foo;pillar"/> + <rdfs:range rdf:resource="&foo;pillar"/> + </owl:ObjectProperty> + + + + <!-- http://localhost/foo#supports --> + + <owl:ObjectProperty rdf:about="&foo;supports"> + <rdfs:range rdf:resource="&foo;parallelpipe"/> + <rdfs:domain rdf:resource="&foo;pillar"/> + </owl:ObjectProperty> + + + + <!-- http://localhost/foo#touches --> + + <owl:ObjectProperty rdf:about="&foo;touches"> + <rdfs:domain rdf:resource="&foo;pillar"/> + <rdfs:range rdf:resource="&foo;pillar"/> + </owl:ObjectProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://localhost/foo#brick --> + + <owl:Class rdf:about="&foo;brick"/> + + + + <!-- http://localhost/foo#construction --> + + <owl:Class rdf:about="&foo;construction"/> + + + + <!-- http://localhost/foo#freeStandingPillar --> + + <owl:Class rdf:about="&foo;freeStandingPillar"> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&foo;pillar"/> + <owl:Class> + <owl:complementOf> + <owl:Restriction> + <owl:onProperty rdf:resource="&foo;touches"/> + <owl:someValuesFrom rdf:resource="&owl;Thing"/> + </owl:Restriction> + </owl:complementOf> + </owl:Class> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + </owl:Class> + + + + <!-- 
http://localhost/foo#parallelpipe --> + + <owl:Class rdf:about="&foo;parallelpipe"> + <owl:equivalentClass> + <owl:Class> + <owl:unionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&foo;wedge"/> + <rdf:Description rdf:about="&foo;brick"/> + </owl:unionOf> + </owl:Class> + </owl:equivalentClass> + </owl:Class> + + + + <!-- http://localhost/foo#pillar --> + + <owl:Class rdf:about="&foo;pillar"/> + + + + <!-- http://localhost/foo#wedge --> + + <owl:Class rdf:about="&foo;wedge"/> + + + + <!-- http://www.w3.org/2002/07/owl#Nothing --> + + <owl:Class rdf:about="&owl;Nothing"> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&foo;pillar"/> + <rdf:Description rdf:about="&foo;parallelpipe"/> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&foo;construction"/> + <rdf:Description rdf:about="&foo;parallelpipe"/> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + <owl:equivalentClass> + <owl:Class> + <owl:intersectionOf rdf:parseType="Collection"> + <rdf:Description rdf:about="&foo;construction"/> + <rdf:Description rdf:about="&foo;pillar"/> + </owl:intersectionOf> + </owl:Class> + </owl:equivalentClass> + </owl:Class> + + + + <!-- http://www.w3.org/2002/07/owl#Thing --> + + <owl:Class rdf:about="&owl;Thing"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Individuals + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://localhost/foo#a --> + + <foo:brick rdf:about="&foo;a"/> + + + + <!-- http://localhost/foo#b --> + + <foo:brick rdf:about="&foo;b"/> + + + + <!-- http://localhost/foo#c --> + + <foo:brick rdf:about="&foo;c"/> + + + + <!-- http://localhost/foo#c1 --> + + <foo:construction rdf:about="&foo;c1"> + 
<foo:hasParallelpipe rdf:resource="&foo;a"/> + <foo:hasPillar rdf:resource="&foo;p2"/> + <foo:hasPillar rdf:resource="&foo;p1"/> + </foo:construction> + + + + <!-- http://localhost/foo#c2 --> + + <foo:construction rdf:about="&foo;c2"> + <foo:hasPillar rdf:resource="&foo;p3"/> + <foo:hasParallelpipe rdf:resource="&foo;b"/> + <foo:hasPillar rdf:resource="&foo;p4"/> + </foo:construction> + + + + <!-- http://localhost/foo#c3 --> + + <foo:construction rdf:about="&foo;c3"> + <foo:hasPillar rdf:resource="&foo;p5"/> + <foo:hasParallelpipe rdf:resource="&foo;c"/> + <foo:hasPillar rdf:resource="&foo;p6"/> + </foo:construction> + + + + <!-- http://localhost/foo#c4 --> + + <foo:construction rdf:about="&foo;c4"> + <foo:hasParallelpipe rdf:resource="&foo;d"/> + <foo:hasPillar rdf:resource="&foo;p8"/> + <foo:hasPillar rdf:resource="&foo;p7"/> + </foo:construction> + + + + <!-- http://localhost/foo#c5 --> + + <foo:construction rdf:about="&foo;c5"> + <foo:hasPillar rdf:resource="&foo;p9"/> + <foo:hasParallelpipe rdf:resource="&foo;e"/> + </foo:construction> + + + + <!-- http://localhost/foo#d --> + + <foo:wedge rdf:about="&foo;d"/> + + + + <!-- http://localhost/foo#e --> + + <foo:wedge rdf:about="&foo;e"/> + + + + <!-- http://localhost/foo#p1 --> + + <foo:pillar rdf:about="&foo;p1"> + <rdf:type rdf:resource="&foo;freeStandingPillar"/> + <foo:supports rdf:resource="&foo;a"/> + <foo:leftof rdf:resource="&foo;p2"/> + </foo:pillar> + + + + <!-- http://localhost/foo#p2 --> + + <foo:pillar rdf:about="&foo;p2"> + <rdf:type rdf:resource="&foo;freeStandingPillar"/> + <foo:supports rdf:resource="&foo;a"/> + </foo:pillar> + + + + <!-- http://localhost/foo#p3 --> + + <foo:freeStandingPillar rdf:about="&foo;p3"> + <rdf:type rdf:resource="&foo;pillar"/> + <foo:leftof rdf:resource="&foo;p4"/> + </foo:freeStandingPillar> + + + + <!-- http://localhost/foo#p4 --> + + <foo:freeStandingPillar rdf:about="&foo;p4"> + <rdf:type rdf:resource="&foo;pillar"/> + </foo:freeStandingPillar> + + + + <!-- 
http://localhost/foo#p5 --> + + <foo:pillar rdf:about="&foo;p5"> + <foo:supports rdf:resource="&foo;c"/> + <foo:touches rdf:resource="&foo;p6"/> + <foo:leftof rdf:resource="&foo;p6"/> + </foo:pillar> + + + + <!-- http://localhost/foo#p6 --> + + <foo:pillar rdf:about="&foo;p6"> + <foo:supports rdf:resource="&foo;c"/> + </foo:pillar> + + + + <!-- http://localhost/foo#p7 --> + + <foo:freeStandingPillar rdf:about="&foo;p7"> + <rdf:type rdf:resource="&foo;pillar"/> + <foo:leftof rdf:resource="&foo;p8"/> + <foo:supports rdf:resource="&foo;d"/> + </foo:freeStandingPillar> + + + + <!-- http://localhost/foo#p8 --> + + <foo:freeStandingPillar rdf:about="&foo;p8"> + <rdf:type rdf:resource="&foo;pillar"/> + <foo:supports rdf:resource="&foo;d"/> + </foo:freeStandingPillar> + + + + <!-- http://localhost/foo#p9 --> + + <foo:freeStandingPillar rdf:about="&foo;p9"> + <rdf:type rdf:resource="&foo;pillar"/> + <foo:supports rdf:resource="&foo;e"/> + </foo:freeStandingPillar> +</rdf:RDF> Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java 2008-03-03 19:38:48 UTC (rev 677) @@ -43,7 +43,7 @@ import org.dllearner.core.owl.ObjectProperty; import org.dllearner.learningproblems.PosNegLP; import org.dllearner.learningproblems.PosOnlyDefinitionLP; -import org.dllearner.refinementoperators.RhoDown; +import org.dllearner.refinementoperators.RhoDRDown; import org.dllearner.utilities.Files; import org.dllearner.utilities.Helper; @@ -54,8 +54,6 @@ * passes them to the actual refinement operator, heuristic, and * learning algorithm implementations. * - * Note: The component is not working yet. 
- * * Note: The options supported by the ROLearner component and this * one are not equal. Options that have been dropped for now: * - horizontal expansion factor: The goal of the algorithm will @@ -106,6 +104,7 @@ private boolean useNegation = true; private boolean useBooleanDatatypes = true; private double noisePercentage = 0.0; + private NamedClass startClass = null; // Variablen zur Einstellung der Protokollierung // boolean quiet = false; @@ -167,6 +166,7 @@ noisePercentage.setLowerLimit(0); noisePercentage.setUpperLimit(100); options.add(noisePercentage); + options.add(new StringConfigOption("startClass", "the named class which should be used to start the algorithm (GUI: needs a widget for selecting a class)")); return options; } @@ -219,8 +219,9 @@ noisePercentage = (Double) entry.getValue(); } else if(name.equals("useBooleanDatatypes")) { useBooleanDatatypes = (Boolean) entry.getValue(); + } else if(name.equals("startClass")) { + startClass = new NamedClass((String)entry.getValue()); } - } /* (non-Javadoc) @@ -276,19 +277,21 @@ if(improveSubsumptionHierarchy) rs.getSubsumptionHierarchy().improveSubsumptionHierarchy(); rs.prepareRoleHierarchy(usedRoles); + rs.prepareDatatypePropertyHierarchy(); // create a refinement operator and pass all configuration // variables to it - RhoDown operator = new RhoDown( - rs, - applyAllFilter, - applyExistsFilter, - useAllConstructor, - useExistsConstructor, - useNegation, - useBooleanDatatypes - ); - + RhoDRDown operator = new RhoDRDown( + rs, + applyAllFilter, + applyExistsFilter, + useAllConstructor, + useExistsConstructor, + useNegation, + useBooleanDatatypes, + startClass + ); + // create an algorithm object and pass all configuration // options to it algorithm = new ExampleBasedROLearner( Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-03-03 19:38:48 UTC (rev 677) @@ -42,7 +42,7 @@ import org.dllearner.core.owl.Thing; import org.dllearner.learningproblems.PosNegLP; import org.dllearner.learningproblems.PosOnlyDefinitionLP; -import org.dllearner.refinementoperators.RhoDown; +import org.dllearner.refinementoperators.RhoDRDown; import org.dllearner.utilities.ConceptComparator; import org.dllearner.utilities.ConceptTransformation; import org.dllearner.utilities.Files; @@ -107,7 +107,8 @@ private List<Description> solutions = new LinkedList<Description>(); // used refinement operator and heuristic (exchangeable) - private RhoDown operator; + private RhoDRDown operator; +// private RefinementOperator operator; // private ExampleBasedHeuristic heuristic; // specifies whether to compute and log benchmark information @@ -195,7 +196,7 @@ } nrOfExamples = nrOfPositiveExamples + nrOfNegativeExamples; this.rs = rs; - this.operator = (RhoDown) operator; + this.operator = (RhoDRDown) operator; // initialise candidate set with heuristic as ordering candidates = new TreeSet<ExampleBasedNode>(heuristic); this.noise = noise; @@ -568,7 +569,7 @@ tooWeakList.add(refinement); } else { // Lösung gefunden - if(quality >= 0 && quality<allowedMisclassifications) { + if(quality >= 0 && quality<=allowedMisclassifications) { solutionFound = true; solutions.add(refinement); } Modified: trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-03-03 19:38:48 UTC (rev 677) @@ -279,6 +279,11 @@ + description + " unsupported."); } + @Override + public 
SortedSet<Individual> retrieval(Description concept) { + return rs.retrieval(concept); + } + /* * (non-Javadoc) * Modified: trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java 2008-03-03 15:49:03 UTC (rev 676) +++ trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java 2008-03-03 19:38:48 UTC (rev 677) @@ -97,8 +97,8 @@ private Map<Integer, List<List<Integer>>> combos = new HashMap<Integer, List<List<Integer>>>(); // refinements of the top concept ordered by length - private Map<Integer, SortedSet<? extends Description>> topRefinements = new TreeMap<Integer, SortedSet<? extends Description>>(); - private Map<NamedClass,Map<Integer, SortedSet<? extends Description>>> topARefinements = new TreeMap<NamedClass,Map<Integer, SortedSet<? extends Description>>>(); + private Map<Integer, SortedSet<Description>> topRefinements = new TreeMap<Integer, SortedSet<Description>>(); + private Map<NamedClass,Map<Integer, SortedSet<Description>>> topARefinements = new TreeMap<NamedClass,Map<Integer, SortedSet<Description>>>(); // cumulated refinements of top (all from length one to the specified length) private Map<Integer, TreeSet<Description>> topRefinementsCumulative = new HashMap<Integer, TreeSet<Description>>(); @@ -119,8 +119,8 @@ private ConceptComparator conceptComparator = new ConceptComparator(); // Statistik - private long mComputationTimeNs = 0; - private long topComputationTimeNs = 0; + public long mComputationTimeNs = 0; + public long topComputationTimeNs = 0; private boolean applyAllFilter = true; private boolean applyExistsFilter = true; @@ -129,12 +129,20 @@ private boolean useNegation = true; private boolean useBooleanDatatypes = true; - public RhoDRDown(ReasoningService rs) { - this(rs, null); + public RhoDRDown(ReasoningService reasoningService) { + this(reasoningService, true, true, true, 
true, true, true, null); } - public RhoDRDown(ReasoningService rs, NamedClass startClass) { - this.rs = rs; + public RhoDRDown(ReasoningService reasoningService, boolean applyAllFilter, boolean applyExistsFilter, boolean useAllConstructor, + boolean useExistsConstructor, boolean useNegation, boolean useBooleanDatatypes, NamedClass startClass) { + this.rs = reasoningService; + this.applyAllFilter = applyAllFilter; + this.applyExistsFilter = applyExistsFilter; + this.useAllConstructor = useAllConstructor; + this.useExistsConstructor = useExistsConstructor; + this.useNegation = useNegation; + this.useBooleanDatatypes = useBooleanDatatypes; + subHierarchy = rs.getSubsumptionHierarchy(); // query reasoner for domains and ranges @@ -169,6 +177,14 @@ @SuppressWarnings({"unchecked"}) public Set<Description> refine(Description description, int maxLength, List<Description> knownRefinements, Description currDomain) { + +// System.out.println(description + " " + currDomain); + + // actions needing to be performed if this is the first time the + // current domain is used + if(!(currDomain instanceof Thing) && !topARefinementsLength.containsKey(currDomain)) + topARefinementsLength.put((NamedClass)currDomain, 0); + // TODO: check whether using list or set makes more sense // here; and whether HashSet or TreeSet should be used Set<Description> refinements = new TreeSet<Description>(conceptComparator); @@ -184,8 +200,8 @@ refinements = (TreeSet<Description>) topRefinementsCumulative.get(maxLength).clone(); } else { if(maxLength>topARefinementsLength.get(currDomain)) - computeTopRefinements(maxLength); - refinements = (TreeSet<Description>) topRefinementsCumulative.get(maxLength).clone(); + computeTopRefinements(maxLength, (NamedClass) currDomain); + refinements = (TreeSet<Description>) topARefinementsCumulative.get(currDomain).get(maxLength).clone(); } // refinements.addAll(subHierarchy.getMoreSpecialConcepts(description)); @@ -303,9 +319,10 @@ int topRefLength = maxLength - 
description.getLength() - 1; // maybe we have to compute new top refinements here - if(currDomain instanceof Thing && topRefLength > topRefinementsLength) - computeTopRefinements(topRefLength); - else if(topRefLength > topARefinementsLength.get(currDomain)) + if(currDomain instanceof Thing) { + if(topRefLength > topRefinementsLength) + computeTopRefinements(topRefLength); + } else if(topRefLength > topARefinementsLength.get(currDomain)) computeTopRefinements(topRefLength,(NamedClass)currDomain); if(topRefLength>0) { @@ -366,24 +383,48 @@ if(domain != null && !mA.containsKey(domain)) computeM(domain); + int refinementsLength; + + if(domain == null) { + refinementsLength = topRefinementsLength; + } else { + if(!topARefinementsLength.containsKey(domain)) + topARefinementsLength.put(domain,0); + + refinementsLength = topARefinementsLength.get(domain); + } + // compute all possible combinations of the disjunction - int refinementsLength = (domain == null) ? topRefinementsLength : topARefinementsLength.get(domain); for(int i = refinementsLength+1; i <= maxLength; i++) { combos.put(i,MathOperations.getCombos(i, mMaxLength)); + // initialise the refinements with empty sets + if(domain == null) { + topRefinements.put(i, new TreeSet<Description>(conceptComparator)); + } else { + if(!topARefinements.containsKey(domain)) + topARefinements.put(domain, new TreeMap<Integer,SortedSet<Description>>()); + topARefinements.get(domain).put(i, new TreeSet<Description>(conceptComparator)); + } + for(List<Integer> combo : combos.get(i)) { // combination is a single number => try to use M if(combo.size()==1) { + // note we cannot use "put" instead of "addAll" because there + // can be several combos for one length if(domain == null) - topRefinements.put(i,m.get(i)); + topRefinements.get(i).addAll(m.get(i)); else - topARefinements.get(domain).put(i,mA.get(domain).get(i)); + topARefinements.get(domain).get(i).addAll(mA.get(domain).get(i)); // combinations has several numbers => generate 
disjunct } else { SortedSet<Union> baseSet = new TreeSet<Union>(conceptComparator); for(Integer j : combo) { - baseSet = MathOperations.incCrossProduct(baseSet, m.get(j)); + if(domain == null) + baseSet = MathOperations.incCrossProduct(baseSet, m.get(j)); + else + baseSet = MathOperations.incCrossProduct(baseSet, mA.get(domain).get(j)); } // convert all concepts in ordered negation normal form @@ -405,9 +446,9 @@ // add computed refinements if(domain == null) - topRefinements.put(new Integer(i), baseSet); + topRefinements.get(i).addAll(baseSet); else - topARefinements.get(domain).put(new Integer(i), baseSet); + topARefinements.get(domain).get(i).addAll(baseSet); } } @@ -415,15 +456,20 @@ // be accessed easily TreeSet<Description> cumulativeRefinements = new TreeSet<Description>(conceptComparator); for(int j=1; j<=i; j++) { - if(domain == null) + if(domain == null) { cumulativeRefinements.addAll(topRefinements.get(j)); - else + } else { cumulativeRefinements.addAll(topARefinements.get(domain).get(j)); + } } - if(domain == null) + + if(domain == null) { topRefinementsCumulative.put(i, cumulativeRefinements); - else + } else { + if(!topARefinementsCumulative.containsKey(domain)) + topARefinementsCumulative.put(domain, new TreeMap<Integer, TreeSet<Description>>()); topARefinementsCumulative.get(domain).put(i, cumulativeRefinements); + } } // register new top refinements length @@ -495,12 +541,12 @@ mA.put(nc, new TreeMap<Integer,SortedSet<Description>>()); // initialise all possible lengths (1 to 3) - for(int i=1; i<=3; i++) { + for(int i=1; i<=mMaxLength; i++) { mA.get(nc).put(i, new TreeSet<Description>(conceptComparator)); } SortedSet<Description> m1 = rs.getMoreSpecialConcepts(nc); - m.put(1,m1); + mA.get(nc).put(1,m1); if(useNegation) { // the definition in the paper is more complex, but acutally @@ -516,11 +562,11 @@ m2.add(c); else { NamedClass a = (NamedClass) c; - if(!isNotADisjoint(a, nc) && !isNotAMeaningFul(a, nc)) + if(!isNotADisjoint(a, nc) && 
isNotAMeaningFul(a, nc)) m2.add(new Negation(a)); } } - m.put(2,m2); + mA.get(nc).put(2,m2); } // compute applicable properties @@ -551,7 +597,7 @@ } } - m.put(3,m3); + mA.get(nc).put(3,m3); mComputationTimeNs += System.nanoTime() - mComputationTimeStartNs; } @@ -560,6 +606,12 @@ // compute the applicable properties if this has not been done yet if(appOP.get(domain) == null) computeApp(domain); + + // initialise mgr, mgbd, mgdd + mgr.put(domain, new TreeSet<ObjectProperty>()); + mgbd.put(domain, new TreeSet<DatatypeProperty>()); + mgdd.put(domain, new TreeSet<DatatypeProperty>()); + SortedSet<ObjectProperty> mostGeneral = rs.getMostGeneralRoles(); computeMgrRecursive(domain, mostGeneral, mgr.get(domain)); SortedSet<DatatypeProperty> mostGeneralDP = rs.getMostGeneralDatatypeProperties(); @@ -606,8 +658,8 @@ for(ObjectProperty role : mostGeneral) { // TODO: currently we just rely on named classes as roles, // instead of computing dom(r) and ran(r) - NamedClass nc = (NamedClass) rs.getDomain(role); - if(!isDisjoint(domain,nc)) + Description d = rs.getDomain(role); + if(!isDisjoint(domain,d)) applicableRoles.add(role); } appOP.put(domain, applicableRoles); @@ -616,8 +668,8 @@ Set<DatatypeProperty> mostGeneralBDPs = rs.getBooleanDatatypeProperties(); Set<DatatypeProperty> applicableBDPs = new TreeSet<DatatypeProperty>(); for(DatatypeProperty role : mostGeneralBDPs) { - NamedClass nc = (NamedClass) rs.getDomain(role); - if(!isDisjoint(domain,nc)) + Description d = (NamedClass) rs.getDomain(role); + if(!isDisjoint(domain,d)) applicableBDPs.add(role); } appBD.put(domain, applicableBDPs); @@ -626,8 +678,8 @@ Set<DatatypeProperty> mostGeneralDDPs = rs.getBooleanDatatypeProperties(); Set<DatatypeProperty> applicableDDPs = new TreeSet<DatatypeProperty>(); for(DatatypeProperty role : mostGeneralDDPs) { - NamedClass nc = (NamedClass) rs.getDomain(role); - if(!isDisjoint(domain,nc)) + Description d = (NamedClass) rs.getDomain(role); + if(!isDisjoint(domain,d)) 
applicableDDPs.add(role); } appDD.put(domain, applicableDDPs); @@ -637,10 +689,10 @@ // by the reasoner only ones and otherwise taken from a matrix // => this has low importance in the long run, because M is cached anyway, // but avoids many duplicate queries when computing M - private boolean isDisjoint(NamedClass a, NamedClass b) { + private boolean isDisjoint(NamedClass a, Description d) { // we need to test whether A AND B is equivalent to BOTTOM - Description d = new Intersection(a, b); - return rs.subsumes(new Nothing(), d); + Description d2 = new Intersection(a, d); + return rs.subsumes(new Nothing(), d2); } // we need to test whether NOT A AND B is equivalent to BOTTOM @@ -656,7 +708,8 @@ private boolean isNotAMeaningFul(NamedClass a, NamedClass b) { Description notA = new Negation(a); Description d = new Intersection(notA, b); - return !rs.subsumes(b, d); + // check b subClassOf b AND NOT A (if yes then it is not meaningful) + return !rs.subsumes(d, b); } } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ton...@us...> - 2008-03-04 03:55:05
|
Revision: 680 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=680&view=rev Author: tonytacker Date: 2008-03-03 19:54:48 -0800 (Mon, 03 Mar 2008) Log Message: ----------- I made statistics in an extra thread. It will be auto updated every 5 seconds. /examples/test.conf was generated and can be overwritten, deleted or what you want. This file is not necessary but useful. Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/gui/RunPanel.java Added Paths: ----------- trunk/examples/test.conf trunk/src/dl-learner/org/dllearner/gui/ThreadStatistics.java Added: trunk/examples/test.conf =================================================================== --- trunk/examples/test.conf (rev 0) +++ trunk/examples/test.conf 2008-03-04 03:54:48 UTC (rev 680) @@ -0,0 +1,7 @@ +import("father.kb"); + ++"http://localhost/foo#bernd" ++"http://localhost/foo#gabi" + +-"http://localhost/foo#markus" + Modified: trunk/src/dl-learner/org/dllearner/gui/RunPanel.java =================================================================== --- trunk/src/dl-learner/org/dllearner/gui/RunPanel.java 2008-03-03 23:51:39 UTC (rev 679) +++ trunk/src/dl-learner/org/dllearner/gui/RunPanel.java 2008-03-04 03:54:48 UTC (rev 680) @@ -35,13 +35,12 @@ private static final long serialVersionUID = 1643304576470046636L; - private JButton runButton, stopButton, getBestSolutionButton, getSolutionScoreButton, - getReasonerStatsButton; + private JButton runButton, stopButton; private JTextArea infoArea; private Config config; private ThreadRun thread; - private Boolean runBoolean = new Boolean(false); + // private Boolean runBoolean = new Boolean(false); private JPanel showPanel = new JPanel(); private JPanel infoPanel = new JPanel(); @@ -57,15 +56,6 @@ stopButton = new JButton("Stop"); stopButton.addActionListener(this); - getBestSolutionButton = new JButton("GetBestSolution"); - getBestSolutionButton.addActionListener(this); - - getSolutionScoreButton = new JButton("GetSolutionScore"); - 
getSolutionScoreButton.addActionListener(this); - - getReasonerStatsButton = new JButton("GetReasonerStats"); - getReasonerStatsButton.addActionListener(this); - infoArea = new JTextArea(20, 50); JScrollPane infoScroll = new JScrollPane(infoArea); @@ -74,10 +64,6 @@ infoPanel.add(infoScroll); - solutionPanel.add(getBestSolutionButton); - solutionPanel.add(getSolutionScoreButton); - solutionPanel.add(getReasonerStatsButton); - add(showPanel, BorderLayout.PAGE_START); add(infoPanel, BorderLayout.CENTER); add(solutionPanel, BorderLayout.PAGE_END); @@ -90,48 +76,50 @@ thread = new ThreadRun(config); config.getReasoningService().resetStatistics(); thread.start(); - this.runBoolean = true; + // this.runBoolean = true; + ThreadStatistics threadStatistics = new ThreadStatistics(config, this); + threadStatistics.start(); } // stop if (e.getSource() == stopButton && config.getLearningAlgorithm() != null) { thread.exit(); } - // getBestSolution - if (e.getSource() == getBestSolutionButton && runBoolean) { - if (config.getLearningAlgorithm().getBestSolution() != null) - infoArea.setText(config.getLearningAlgorithm().getBestSolution().toString()); - } - // getSolutionScore - if (e.getSource() == getSolutionScoreButton && runBoolean) { - if (config.getLearningAlgorithm().getSolutionScore() != null) - infoArea.setText(config.getLearningAlgorithm().getSolutionScore().toString()); - } - // ReasonerStats - if (e.getSource() == getReasonerStatsButton && runBoolean) { - infoArea.setText(""); - if (config.getAlgorithmRunTime() != null) - infoArea.append("Algorithm Runtime: " + makeTime(config.getAlgorithmRunTime()) - + "\n"); - infoArea.append("OverallReasoningTime: " - + makeTime(config.getReasoningService().getOverallReasoningTimeNs()) + "\n"); - infoArea.append("Instances (" + config.getReasoningService().getNrOfInstanceChecks() - + "): "); - if (config.getReasoningService().getNrOfInstanceChecks() > 0) - 
infoArea.append(makeTime(config.getReasoningService().getTimePerInstanceCheckNs()) - + "\n"); - else - infoArea.append(" - \n"); - infoArea.append("Retrieval (" + config.getReasoningService().getNrOfRetrievals() - + "): "); - if (config.getReasoningService().getNrOfRetrievals() > 0) - infoArea.append(makeTime(config.getReasoningService().getTimePerRetrievalNs()) - + "\n"); - else - infoArea.append(" - \n"); + } + + /** + * Show Statistics. + */ + public void showStats() { + infoArea.setText(""); + // best solution + if (config.getLearningAlgorithm().getBestSolution() != null) + infoArea.append("BestSolution:\n" + + config.getLearningAlgorithm().getBestSolution().toString() + "\n\n"); + // solution score +// if (config.getLearningAlgorithm().getSolutionScore() != null) +// infoArea.append("SolutionScore:\n" +// + config.getLearningAlgorithm().getSolutionScore().toString() + "\n\n"); + // reasoner statistics + if (config.getAlgorithmRunTime() != null) + infoArea.append("Algorithm Runtime: " + makeTime(config.getAlgorithmRunTime()) + "\n"); + infoArea.append("OverallReasoningTime: " + + makeTime(config.getReasoningService().getOverallReasoningTimeNs()) + "\n"); + infoArea.append("Instances (" + config.getReasoningService().getNrOfInstanceChecks() + + "): "); + if (config.getReasoningService().getNrOfInstanceChecks() > 0) + infoArea.append(makeTime(config.getReasoningService().getTimePerInstanceCheckNs()) + + "\n"); + else + infoArea.append(" - \n"); + infoArea.append("Retrieval (" + config.getReasoningService().getNrOfRetrievals() + "): "); + if (config.getReasoningService().getNrOfRetrievals() > 0) + infoArea.append(makeTime(config.getReasoningService().getTimePerRetrievalNs()) + "\n"); + else + infoArea.append(" - \n"); + if (config.getReasoningService().getNrOfSubsumptionChecks() > 0) infoArea.append("Subsumption (" + config.getReasoningService().getNrOfSubsumptionChecks() + "): " + makeTime(config.getReasoningService().getTimePerSubsumptionCheckNs()) + "\n"); - } 
} /** @@ -142,6 +130,8 @@ * @return a string like this: 3h 10min 46s 753ms */ public String makeTime(Long nanoSeconds) { + if (nanoSeconds == null) + return null; Long hours = 0L, minutes = 0L, seconds = 0L, millis = 0L, mikros = 0L, nanos = 0L; String time = ""; Added: trunk/src/dl-learner/org/dllearner/gui/ThreadStatistics.java =================================================================== --- trunk/src/dl-learner/org/dllearner/gui/ThreadStatistics.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/gui/ThreadStatistics.java 2008-03-04 03:54:48 UTC (rev 680) @@ -0,0 +1,55 @@ +package org.dllearner.gui; + +/** + * Copyright (C) 2007-2008, Jens Lehmann + * + * This file is part of DL-Learner. + * + * DL-Learner is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 3 of the License, or + * (at your option) any later version. + * + * DL-Learner is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + * + */ + +/** + * Start statistics in a new thread. 
+ * + * @author Tilo Hielscher + */ +public class ThreadStatistics extends Thread { + + Config config; + RunPanel runPanel; + + public ThreadStatistics(Config config, RunPanel runPanel) { + this.config = config; + this.runPanel = runPanel; + } + + /** + * method to start thread + */ + @Override + public void run() { + if (config.getThreadIsRunning()) { + while (config.getThreadIsRunning()) { + try { + runPanel.showStats(); + sleep(5000); // sleep 5 seconds + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + runPanel.showStats(); + } + } +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-03-04 19:11:41
|
Revision: 687 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=687&view=rev Author: jenslehmann Date: 2008-03-04 11:10:55 -0800 (Tue, 04 Mar 2008) Log Message: ----------- reasoning bug fixes and extensions Modified Paths: -------------- trunk/examples/commandcollection.txt trunk/src/dl-learner/org/dllearner/core/ReasonerComponent.java trunk/src/dl-learner/org/dllearner/core/owl/TypedConstant.java trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java Modified: trunk/examples/commandcollection.txt =================================================================== --- trunk/examples/commandcollection.txt 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/examples/commandcollection.txt 2008-03-04 19:10:55 UTC (rev 687) @@ -66,3 +66,4 @@ import("http://dbpedia.openlinksw.com:8890/sparql","SPARQL"); sparql.instances = {}; +posNegDefinition.percentPerLengthUnit = 0.10; \ No newline at end of file Modified: trunk/src/dl-learner/org/dllearner/core/ReasonerComponent.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/ReasonerComponent.java 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/src/dl-learner/org/dllearner/core/ReasonerComponent.java 2008-03-04 19:10:55 UTC (rev 687) @@ -149,6 +149,22 @@ return ret; } + // convenience method returning those values which have value "false" for this + // datatype property + public SortedSet<Individual> getFalseDatatypeMembers(DatatypeProperty datatypeProperty) throws ReasoningMethodUnsupportedException { + Map<Individual, SortedSet<Constant>> mapping = getDatatypeMembers(datatypeProperty); + SortedSet<Individual> ret = new TreeSet<Individual>(); + for(Entry<Individual, SortedSet<Constant>> e : mapping.entrySet()) { + SortedSet<Constant> values = e.getValue(); + 
for(Constant c : values) { + boolean v = Boolean.parseBoolean(c.getLiteral()); + if(v == false) + ret.add(e.getKey()); + } + } + return ret; + } + public boolean instanceCheck(Description concept, Individual individual) throws ReasoningMethodUnsupportedException { throw new ReasoningMethodUnsupportedException(); Modified: trunk/src/dl-learner/org/dllearner/core/owl/TypedConstant.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/owl/TypedConstant.java 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/src/dl-learner/org/dllearner/core/owl/TypedConstant.java 2008-03-04 19:10:55 UTC (rev 687) @@ -27,7 +27,7 @@ * @author Jens Lehmann * */ -public class TypedConstant extends Constant { +public class TypedConstant extends Constant implements Comparable<TypedConstant> { private Datatype datatype; @@ -47,7 +47,8 @@ * @see org.dllearner.core.owl.KBElement#toString(java.lang.String, java.util.Map) */ public String toString(String baseURI, Map<String, String> prefixes) { - return literal + "^^" + datatype; + return literal; +// return literal + "^^" + datatype; } /** @@ -62,6 +63,19 @@ */ public void accept(KBElementVisitor visitor) { visitor.visit(this); + } + + /* (non-Javadoc) + * @see java.lang.Comparable#compareTo(java.lang.Object) + */ + public int compareTo(TypedConstant o) { + // the first criteria is the datatype + int datatypeComparision = datatype.getURI().compareTo(datatype.getURI()); + if(datatypeComparision == 0) { + // the second criterion is the literal value + return literal.compareTo(o.literal); + } else + return datatypeComparision; } } Modified: trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java =================================================================== --- trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java 2008-03-04 19:10:55 UTC (rev 687) @@ -204,6 +204,10 @@ 
// and cannot parser its own generated file // DisjointClassesAxiom disjointAtomTypes = getDisjointClassesAxiom(atomTypes); // kb.addAxiom(disjointAtomTypes); + String[] mainClasses = new String[] {"Compound", "Atom", "Bond", "Structure"}; + Set<String> mainClassesSet = new HashSet<String>(Arrays.asList(mainClasses)); + DisjointClassesAxiom disjointAtomTypes = getDisjointClassesAxiom(mainClassesSet); + kb.addAxiom(disjointAtomTypes); // all different axiom (UNA) // exporting differentIndividuals axioms is broken in OWL API @@ -230,9 +234,12 @@ String confHeader = "import(\"pte.owl\");\n\n"; confHeader += "reasoner = fastInstanceChecker;\n"; confHeader += "algorithm = refexamples;\n"; -// confHeader += "refinement.writeSearchTree = true;"; -// confHeader += "refinement.searchTreeFile = \"log/carcinogenesis/searchTree.log\""; - confHeader += "\n\n"; + confHeader += "refexamples.noisePercentage = 35;\n"; + confHeader += "refexamples.startClass = " + getURI2("Compound") + ";\n"; + confHeader += "refexamples.writeSearchTree = false;\n"; + confHeader += "refexamples.searchTreeFile = \"log/carcinogenesis/searchTree.log\";\n"; + confHeader += "posNegDefinition.percentPerLengthUnit = 0.10;\n"; + confHeader += "\n"; Files.appendFile(confTrainFile, confHeader); // generating training examples @@ -338,7 +345,9 @@ } else if (headName.equals("has_property")) { String compoundName = head.getArgument(0).toPLString(); String testName = head.getArgument(1).toPLString(); - boolean testResult = Boolean.parseBoolean(head.getArgument(2).toPLString()); + String resultStr = head.getArgument(2).toPLString(); + boolean testResult = (resultStr.equals("p")) ? 
true : false; + // create a new datatype property if it does not exist already if(!tests.contains(testName)) { String axiom1 = "DPDOMAIN(" + getURI2(testName) + ") = " + getURI2("Compound") + ".\n"; @@ -453,7 +462,7 @@ private static DisjointClassesAxiom getDisjointClassesAxiom(Set<String> classes) { Set<Description> descriptions = new HashSet<Description>(); for(String namedClass : classes) - descriptions.add(new NamedClass(namedClass)); + descriptions.add(new NamedClass(getURI(namedClass))); return new DisjointClassesAxiom(descriptions); } Modified: trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-03-04 19:10:55 UTC (rev 687) @@ -56,6 +56,7 @@ import org.dllearner.kb.OWLFile; import org.dllearner.parser.KBParser; import org.dllearner.parser.ParseException; +import org.dllearner.utilities.Helper; /** * Reasoner for fast instance checks. It works by completely dematerialising the @@ -76,6 +77,8 @@ private static Logger logger = Logger.getLogger(FastInstanceChecker.class); + private boolean defaultNegation = true; + private Set<NamedClass> atomicConcepts; private Set<ObjectProperty> atomicRoles; private SortedSet<DatatypeProperty> datatypeProperties; @@ -96,10 +99,9 @@ // object property mappings private Map<ObjectProperty, Map<Individual, SortedSet<Individual>>> opPos = new TreeMap<ObjectProperty, Map<Individual, SortedSet<Individual>>>(); // datatype property mappings - // (for booleans we assume that just one mapping exists, e.g. 
- // hasValue(object,true) and hasValue(object,false) will - // lead to undefined behaviour (they are logical contradictions) - private Map<DatatypeProperty, SortedSet<Individual>> bd = new TreeMap<DatatypeProperty, SortedSet<Individual>>(); + // we have one mapping for true and false for efficiency reasons + private Map<DatatypeProperty, SortedSet<Individual>> bdPos = new TreeMap<DatatypeProperty, SortedSet<Individual>>(); + private Map<DatatypeProperty, SortedSet<Individual>> bdNeg = new TreeMap<DatatypeProperty, SortedSet<Individual>>(); // for int and double we assume that a property can have several values, // althoug this should be rare, // e.g. hasValue(object,2) and hasValue(object,3) @@ -151,20 +153,39 @@ // FastRetrievalReasoner later) long dematStartTime = System.currentTimeMillis(); - for (NamedClass atomicConcept : rs.getAtomicConcepts()) { - classInstancesPos.put(atomicConcept, rs.retrieval(atomicConcept)); - Negation negatedAtomicConcept = new Negation(atomicConcept); - classInstancesNeg.put(atomicConcept, rs.retrieval(negatedAtomicConcept)); + logger.debug("dematerialising concepts"); + + for (NamedClass atomicConcept : rs.getAtomicConcepts()) { + + SortedSet<Individual> pos = rs.retrieval(atomicConcept); + classInstancesPos.put(atomicConcept, pos); + + if(defaultNegation) { + classInstancesNeg.put(atomicConcept, Helper.difference(individuals,pos)); + } else { + // Pellet needs approximately infinite time to answer negated queries + // on the carcinogenesis data set (and probably others), so we have to + // be careful here + Negation negatedAtomicConcept = new Negation(atomicConcept); + classInstancesNeg.put(atomicConcept, rs.retrieval(negatedAtomicConcept)); + } + + } + logger.debug("dematerialising object properties"); + for (ObjectProperty atomicRole : atomicRoles) { opPos.put(atomicRole, rc.getRoleMembers(atomicRole)); } + logger.debug("dematerialising datatype properties"); + for (DatatypeProperty dp : booleanDatatypeProperties) { - bd.put(dp, 
rc.getTrueDatatypeMembers(dp)); + bdPos.put(dp, rc.getTrueDatatypeMembers(dp)); + bdNeg.put(dp, rc.getFalseDatatypeMembers(dp)); } - + for (DatatypeProperty dp : intDatatypeProperties) { id.put(dp, rc.getIntDatatypeMembers(dp)); } @@ -269,9 +290,9 @@ if(value) { // check whether the individual is in the set of individuals mapped // to true by this datatype property - return bd.get(dp).contains(individual); + return bdPos.get(dp).contains(individual); } else { - return !bd.get(dp).contains(individual); + return bdNeg.get(dp).contains(individual); } } Modified: trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 2008-03-04 19:10:55 UTC (rev 687) @@ -248,7 +248,8 @@ reasoner = new uk.ac.manchester.cs.factplusplus.owlapi.Reasoner(manager); } catch (Exception e) { e.printStackTrace(); - } + } + System.out.println("Using FaCT++."); } else { // instantiate Pellet reasoner reasoner = new org.mindswap.pellet.owlapi.Reasoner(manager); Modified: trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java 2008-03-04 13:40:00 UTC (rev 686) +++ trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java 2008-03-04 19:10:55 UTC (rev 687) @@ -115,7 +115,7 @@ private Map<NamedClass,Set<DatatypeProperty>> mgbd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); private Map<NamedClass,Set<DatatypeProperty>> mgdd = new TreeMap<NamedClass,Set<DatatypeProperty>>(); - // comparator für Konzepte + // concept comparator private ConceptComparator conceptComparator = new ConceptComparator(); // Statistik @@ -129,6 +129,11 @@ private boolean useNegation = true; private boolean 
useBooleanDatatypes = true; + // caches for reasoner queries +// private Map<NamedClass,Map<NamedClass,Boolean>> abDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); + private Map<NamedClass,Map<NamedClass,Boolean>> notABDisjoint = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); +// private Map<NamedClass,Map<NamedClass,Boolean>> notABMeaningful = new TreeMap<NamedClass,Map<NamedClass,Boolean>>(); + public RhoDRDown(ReasoningService reasoningService) { this(reasoningService, true, true, true, true, true, true, null); } @@ -562,7 +567,7 @@ m2.add(c); else { NamedClass a = (NamedClass) c; - if(!isNotADisjoint(a, nc) && isNotAMeaningFul(a, nc)) + if(!isNotADisjoint(a, nc) && isNotAMeaningful(a, nc)) m2.add(new Negation(a)); } } @@ -697,19 +702,29 @@ // we need to test whether NOT A AND B is equivalent to BOTTOM private boolean isNotADisjoint(NamedClass a, NamedClass b) { - Description notA = new Negation(a); - Description d = new Intersection(notA, b); - return rs.subsumes(new Nothing(), d); + Map<NamedClass,Boolean> tmp = notABDisjoint.get(a); + Boolean tmp2 = null; + if(tmp != null) + tmp2 = tmp.get(b); + + if(tmp2==null) { + Description notA = new Negation(a); + Description d = new Intersection(notA, b); + Boolean result = rs.subsumes(new Nothing(), d); + // ... add to cache ... + return result; + } else + return tmp2; } // we need to test whether NOT A AND B = B // (if not then NOT A is not meaningful in the sense that it does // not semantically add anything to B) - private boolean isNotAMeaningFul(NamedClass a, NamedClass b) { + private boolean isNotAMeaningful(NamedClass a, NamedClass b) { Description notA = new Negation(a); Description d = new Intersection(notA, b); // check b subClassOf b AND NOT A (if yes then it is not meaningful) return !rs.subsumes(d, b); - } + } } \ No newline at end of file This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-03-05 18:30:01
|
Revision: 689 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=689&view=rev Author: jenslehmann Date: 2008-03-05 10:29:41 -0800 (Wed, 05 Mar 2008) Log Message: ----------- - a lot of bug fixes - new learning algorithm should be usable now Modified Paths: -------------- trunk/examples/arch/arch.conf trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/NodeComparatorStable.java trunk/src/dl-learner/org/dllearner/core/ReasoningService.java trunk/src/dl-learner/org/dllearner/learningproblems/ScoreTwoValued.java trunk/src/dl-learner/org/dllearner/parser/kb.jj trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java trunk/src/dl-learner/org/dllearner/utilities/ConceptComparator.java Modified: trunk/examples/arch/arch.conf =================================================================== --- trunk/examples/arch/arch.conf 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/examples/arch/arch.conf 2008-03-05 18:29:41 UTC (rev 689) @@ -18,6 +18,8 @@ algorithm = refexamples; reasoner = fastInstanceChecker; +// comment this out if the search should start from construction +// refexamples.startClass = "http://localhost/foo#construction"; // export("arch.owl"); import("arch.kb"); Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java =================================================================== --- 
trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java 2008-03-05 18:29:41 UTC (rev 689) @@ -20,6 +20,7 @@ package org.dllearner.algorithms.refexamples; +import java.text.DecimalFormat; import java.util.Set; import java.util.TreeSet; @@ -39,6 +40,8 @@ */ public class ExampleBasedNode { + private static DecimalFormat df = new DecimalFormat(); + // example based variables private Set<Individual> coveredPositives; private Set<Individual> coveredNegatives; @@ -50,7 +53,8 @@ // all properties of a node in the search tree private Description concept; private int horizontalExpansion; - private int coveredNegativeExamples; + // specifies whether the node is too weak (exceeds the max. nr allowed + // misclassifications of positive examples) private boolean isTooWeak; private boolean isQualityEvaluated; private boolean isRedundant; @@ -104,11 +108,12 @@ @Override public String toString() { +// System.out.println(concept); String ret = concept.toString() + " [q:"; if(isTooWeak) ret += "tw"; else - ret += coveredNegativeExamples; + ret += coveredNegatives.size(); ret += ", he:" + horizontalExpansion + ", children:" + children.size() + "]"; return ret; } @@ -124,35 +129,50 @@ } } - public String getTreeString() { - return getTreeString(0).toString(); + public String getTreeString(int nrOfPositiveExamples, int nrOfNegativeExamples) { + return getTreeString(nrOfPositiveExamples, nrOfNegativeExamples, 0,null).toString(); } - private StringBuilder getTreeString(int depth) { + public String getTreeString(int nrOfPositiveExamples, int nrOfNegativeExamples, String baseURI) { + return getTreeString(nrOfPositiveExamples, nrOfNegativeExamples, 0,baseURI).toString(); + } + + private StringBuilder getTreeString(int nrOfPositiveExamples, int nrOfNegativeExamples, int depth, String baseURI) { StringBuilder treeString = new StringBuilder(); for(int 
i=0; i<depth-1; i++) treeString.append(" "); if(depth!=0) // treeString.append("|-→ "); treeString.append("|--> "); - treeString.append(getShortDescription()+"\n"); + treeString.append(getShortDescription(nrOfPositiveExamples, nrOfNegativeExamples, baseURI)+"\n"); for(ExampleBasedNode child : children) { - treeString.append(child.getTreeString(depth+1)); + treeString.append(child.getTreeString(nrOfPositiveExamples, nrOfNegativeExamples, depth+1,baseURI)); } return treeString; } - private String getShortDescription() { - String ret = concept.toString() + " [q:"; + public String getShortDescription(int nrOfPositiveExamples, int nrOfNegativeExamples, String baseURI) { + String ret = concept.toString(baseURI,null) + " ["; if(isTooWeak) - ret += "tw"; - else - ret += coveredNegativeExamples; + ret += "q:tw"; + else { + double accuracy = 100 * (coveredPositives.size() + nrOfNegativeExamples - coveredNegatives.size())/(double)(nrOfPositiveExamples+nrOfNegativeExamples); + ret += "acc:" + df.format(accuracy) + "% "; + + // comment this out to display the heuristic score with default parameters + double heuristicScore = MultiHeuristic.getNodeScore(this, nrOfPositiveExamples, nrOfNegativeExamples); + ret += "h:" +df.format(heuristicScore) + " "; + + int wrongPositives = nrOfPositiveExamples - coveredPositives.size(); + ret += "q:" + wrongPositives + "p-" + coveredNegatives.size() + "n"; + } - ret += " ("+qualityEvaluationMethod+"), he:" + horizontalExpansion + "]"; + ret += " ("+qualityEvaluationMethod+"), he:" + horizontalExpansion; + ret += " c:" + children.size() + "]"; + return ret; - } + } public Set<Individual> getCoveredPositives() { return coveredPositives; @@ -178,9 +198,6 @@ return qualityEvaluationMethod; } - public int getCoveredNegativeExamples() { - return coveredNegativeExamples; - } public int getHorizontalExpansion() { return horizontalExpansion; } @@ -199,6 +216,6 @@ */ public ExampleBasedNode getParent() { return parent; - } + } } \ No newline at end of 
file Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLComponent.java 2008-03-05 18:29:41 UTC (rev 689) @@ -85,7 +85,7 @@ private File searchTreeFile; private boolean replaceSearchTree = false; private static String defaultSearchTreeFile = "log/searchTree.txt"; - private String heuristic = "lexicographic"; + private String heuristic = "multi"; Set<NamedClass> allowedConcepts; Set<ObjectProperty> allowedRoles; Set<NamedClass> ignoredConcepts; @@ -241,11 +241,16 @@ if(heuristic == "lexicographic") algHeuristic = new LexicographicHeuristic(); - else { + else if(heuristic == "flexible") { if(learningProblem instanceof PosOnlyDefinitionLP) { throw new RuntimeException("does not work with positive examples only yet"); } algHeuristic = new FlexibleHeuristic(((PosNegLP)learningProblem).getNegativeExamples().size(), ((PosNegLP)learningProblem).getPercentPerLengthUnit()); + } else { + if(learningProblem instanceof PosOnlyDefinitionLP) { + throw new RuntimeException("does not work with positive examples only yet"); + } + algHeuristic = new MultiHeuristic(((PosNegLP)learningProblem).getPositiveExamples().size(),((PosNegLP)learningProblem).getNegativeExamples().size()); } // compute used concepts/roles from allowed/ignored Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-03-05 18:29:41 UTC (rev 689) @@ -170,6 +170,9 @@ private long 
evaluateSetCreationTimeNs = 0; private long improperConceptsRemovalTimeNs = 0; + // prefixes + private String baseURI; + public ExampleBasedROLearner( LearningProblem learningProblem, ReasoningService rs, @@ -212,13 +215,14 @@ this.useTooWeakList = useTooWeakList; this.useOverlyGeneralList = useOverlyGeneralList; this.useShortConceptConstruction = useShortConceptConstruction; + baseURI = rs.getBaseURI(); logger.setLevel(Level.DEBUG); } public void start() { // calculate quality threshold required for a solution - allowedMisclassifications = (int) Math.round(noise * nrOfExamples); + allowedMisclassifications = (int) Math.round(noise * nrOfExamples); // start search with start class if(startDescription == null) { @@ -266,15 +270,15 @@ if(writeSearchTree) { // String treeString = ""; - String treeString = "best expanded node: " + bestNode+ "\n"; + String treeString = "best node: " + bestNode+ "\n"; if(expandedNodes.size()>1) { - treeString += "all expanded nodes:\n"; // due to minimum horizontal expansion:\n"; + treeString += "all expanded nodes:\n"; for(ExampleBasedNode n : expandedNodes) { treeString += " " + n + "\n"; } } expandedNodes.clear(); - treeString += startNode.getTreeString(); + treeString += startNode.getTreeString(nrOfPositiveExamples, nrOfNegativeExamples, baseURI); treeString += "\n"; if(replaceSearchTree) @@ -291,6 +295,7 @@ } if(solutionFound) { + logger.info("best node " + candidatesStable.last().getShortDescription(nrOfPositiveExamples, nrOfNegativeExamples, baseURI)); logger.info("\nsolutions:"); for(Description c : solutions) { logger.info(" " + c + " (length " + c.getLength() +", depth " + c.getDepth() + ")"); @@ -305,10 +310,9 @@ System.out.println("Algorithm terminated succesfully."); } + // we apply the operator recursively until all proper refinements up + // to the maxmimum length are reached private void extendNodeProper(ExampleBasedNode node, int maxLength) { - // Rekursionsanfang ist das Konzept am Knoten selbst; danach wird der 
Operator - // so lange darauf angewandt bis alle proper refinements bis zu maxLength - // gefunden wurden long propCalcNsStart = System.nanoTime(); if(writeSearchTree) @@ -317,54 +321,27 @@ if(node.getChildren().size()>maxNrOfChildren) maxNrOfChildren = node.getChildren().size(); - // Knoten in instabiler Menge muss aktualisiert werden - // => wird jetzt schon vom Algorithmus entfernt - /* - boolean remove = candidates.remove(node); - - if(!remove) { - System.out.println(candidates); - System.out.println(candidatesStable); - System.out.println(node); - - throw new RuntimeException("remove failed"); - }*/ - extendNodeProper(node, node.getConcept(), maxLength, 0); node.setHorizontalExpansion(maxLength); - // wird jetzt schon im Kernalgorithmus hinzugefügt - /* - boolean add = candidates.add(node); - if(!add) { - throw new RuntimeException("add failed"); - }*/ - - // Knoten wird entfernt und wieder hinzugefügt, da sich seine - // Position geändert haben könnte => geht noch nicht wg. ConcurrentModification - // falls Knoten wg. min. horiz. exp. 
expandiert werden - // candidates.remove(node); - // candidates.add(node); propernessCalcTimeNs += (System.nanoTime()-propCalcNsStart); } - - - // für alle proper refinements von concept bis maxLength werden Kinderknoten - // für node erzeugt; - // recDepth dient nur zur Protokollierung + // for all refinements of concept up to max length, we check whether they are properr + // and call the method recursively if not + // recDepth is used only for statistics private void extendNodeProper(ExampleBasedNode node, Description concept, int maxLength, int recDepth) { - // führe Methode nicht aus, wenn Algorithmus gestoppt wurde (alle rekursiven Funktionsaufrufe - // werden nacheinander abgebrochen, so dass ohne weitere Reasoninganfragen relativ schnell beendet wird) + // do not execute methods if algorithm has been stopped (this means that the algorithm + // will terminate without further reasoning queries) if(stop) return; if(recDepth > maxRecDepth) maxRecDepth = recDepth; - // Refinements berechnen => hier dürfen dürfen refinements <= horizontal expansion - // des Konzepts nicht gelöscht werden! + // compute refinements => we must not delete refinements with low horizontal expansion, + // because they are used in recursive calls of this method later on long refinementCalcTimeNsStart = System.nanoTime(); Set<Description> refinements = operator.refine(concept, maxLength, null); refinementCalcTimeNs += System.nanoTime() - refinementCalcTimeNsStart; @@ -393,18 +370,13 @@ while(it.hasNext()) { Description refinement = it.next(); if(refinement.getLength()>node.getHorizontalExpansion()) { - // TODO: an dieser Stelle könnte man Algorithmen ansetzen lassen, die - // versuchen properness-Anfragen zu vermeiden: - // 1. Konzept kürzen und schauen, ob es Mutterkonzept entspricht - // 2. Blacklist, die überprüft, ob Konzept too weak ist - // (dann ist es auch proper) - // sagt aus, ob festgestellt wurde, ob refinement proper ist // (sagt nicht aus, dass das refinement proper ist!) 
boolean propernessDetected = false; // 1. short concept construction if(useShortConceptConstruction) { + // kurzes Konzept konstruieren Description shortConcept = ConceptTransformation.getShortConcept(refinement, conceptComparator); int n = conceptComparator.compare(shortConcept, concept); @@ -503,9 +475,11 @@ quality = getNumberOfNegatives(); qualityKnown = true; newNode.setQualityEvaluationMethod(ExampleBasedNode.QualityEvaluationMethod.OVERLY_GENERAL_LIST); + newNode.setCoveredExamples(learningProblem.getPositiveExamples(), learningProblem.getNegativeExamples()); } + } - + // Qualität des Knotens auswerten if(!qualityKnown) { long propCalcReasoningStart2 = System.nanoTime(); @@ -519,23 +493,23 @@ // calculate how many pos. examples are not covered by the // parent node of the refinement - int misclassifications = nrOfPositiveExamples - coveredPositives.size(); + int misclassifiedPositives = nrOfPositiveExamples - coveredPositives.size(); // iterate through all covered examples (examples which are not - // covered do not need to be tested, because they remain uncovered) - // TODO: DIG will be slow if we send each reasoner request individually + // covered do not need to be tested, because they remain uncovered); + // DIG will be slow if we send each reasoner request individually // (however if we send everything in one request, too many instance checks - // are performed => rely on fast instance checker [still to implement]) + // are performed => rely on fast instance checker) for(Individual i : coveredPositives) { // TODO: move code to a separate function if(quality != -1) { boolean covered = rs.instanceCheck(refinement, i); if(!covered) - misclassifications++; + misclassifiedPositives++; else newlyCoveredPositives.add(i); - if(misclassifications > allowedMisclassifications) + if(misclassifiedPositives > allowedMisclassifications) quality = -1; } @@ -561,7 +535,7 @@ + newlyCoveredNegatives.size(); newNode.setCoveredExamples(newlyCoveredPositives, 
newlyCoveredNegatives); } - + } if(quality == -1) { @@ -575,18 +549,17 @@ solutions.add(refinement); } -// newNode.setCoveredNegativeExamples(quality); + newCandidates.add(newNode); - // candidates.add(newNode); - // candidatesStable.add(newNode); - - - if(quality == getNumberOfNegatives()) + + // we need to make sure that all positives are covered + // before adding something to the overly general list + if((newNode.getCoveredPositives().size() == nrOfPositiveExamples) && quality == getNumberOfNegatives()) overlyGeneralList.add(refinement); - - // System.out.print("."); + } +// System.out.println(newNode.getConcept() + " " + quality); node.addChild(newNode); } } @@ -673,6 +646,7 @@ System.out.println("onnf time percentage: " + df.format(onnfTimePercentage) + "%"); System.out.println("shortening time percentage: " + df.format(shorteningTimePercentage) + "%"); } + System.out.println("properness tests (reasoner/short concept/too weak list): " + propernessTestsReasoner + "/" + propernessTestsAvoidedByShortConceptConstruction + "/" + propernessTestsAvoidedByTooWeakList); System.out.println("concept tests (reasoner/too weak list/overly general list/redundant concepts): " + conceptTestsReasoner + "/" @@ -709,6 +683,22 @@ } return false; } +/* + private Set<Individual> computeQuality(Description refinement, Set<Individual> coveredPositives) { + Set<Individual> ret = new TreeSet<Individual>(); + int misclassifications; + for(Individual i : coveredPositives) { + boolean covered = rs.instanceCheck(refinement, i); + if(!covered) + misclassifications++; + else + ret.add(i); + + if(misclassifications > allowedMisclassifications) + return null; + } + } +*/ public void stop() { stop = true; Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java 2008-03-05 12:07:28 UTC (rev 688) +++ 
trunk/src/dl-learner/org/dllearner/algorithms/refexamples/FlexibleHeuristic.java 2008-03-05 18:29:41 UTC (rev 689) @@ -66,10 +66,10 @@ if(n1.isQualityEvaluated() && n2.isQualityEvaluated() && !n1.isTooWeak() && !n2.isTooWeak()) { // alle scores sind negativ, größere scores sind besser - double score1 = -n1.getCoveredNegativeExamples()/(double)nrOfNegativeExamples; + double score1 = -n1.getCoveredNegatives().size()/(double)nrOfNegativeExamples; score1 -= percentPerLengthUnit * n1.getConcept().getLength(); - double score2 = -n2.getCoveredNegativeExamples()/(double)nrOfNegativeExamples; + double score2 = -n2.getCoveredNegatives().size()/(double)nrOfNegativeExamples; score2 -= percentPerLengthUnit * n2.getConcept().getLength(); double diff = score1 - score2; Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/LexicographicHeuristic.java 2008-03-05 18:29:41 UTC (rev 689) @@ -32,9 +32,9 @@ // sicherstellen, dass Qualität ausgewertet wurde if(n1.isQualityEvaluated() && n2.isQualityEvaluated() && !n1.isTooWeak() && !n2.isTooWeak()) { - if(n1.getCoveredNegativeExamples()<n2.getCoveredNegativeExamples()) + if(n1.getCoveredNegatives().size()<n2.getCoveredNegatives().size()) return 1; - else if(n1.getCoveredNegativeExamples()>n2.getCoveredNegativeExamples()) + else if(n1.getCoveredNegatives().size()>n2.getCoveredNegatives().size()) return -1; else { //TODO: es wäre geringfügig effizienter die Länge nicht mehrfach zu berechnen Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java 2008-03-05 12:07:28 UTC 
(rev 688) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java 2008-03-05 18:29:41 UTC (rev 689) @@ -57,6 +57,11 @@ * N = number of negative examples<br /> * </code></p> * + * TODO: Maybe the number of children of a node could be used instead of + * horiz. exp, because if a node has a very high number of children, the + * algorithm gets stuck easily, while it could still be very agile in other + * parts of the search space. + * * @author Jens Lehmann * */ @@ -65,13 +70,18 @@ private ConceptComparator conceptComparator = new ConceptComparator(); // heuristic parameters - private double expansionPenaltyFactor = 0.01; - private double gainBonusFactor = 1.00; + private double expansionPenaltyFactor; + private double gainBonusFactor; + private double nodeChildPenalty = 0.0001; // examples private int nrOfNegativeExamples; private int nrOfExamples; + public MultiHeuristic(int nrOfPositiveExamples, int nrOfNegativeExamples) { + this(nrOfPositiveExamples, nrOfNegativeExamples, 0.03, 0.5); + } + public MultiHeuristic(int nrOfPositiveExamples, int nrOfNegativeExamples, double expansionPenaltyFactor, double gainBonusFactor) { this.nrOfNegativeExamples = nrOfNegativeExamples; nrOfExamples = nrOfPositiveExamples + nrOfNegativeExamples; @@ -104,11 +114,17 @@ double parentAccuracy = getAccuracy(parent.getCoveredPositives().size(),parent.getCoveredNegatives().size()); gain = accuracy - parentAccuracy; } - return accuracy + gainBonusFactor * gain - expansionPenaltyFactor * node.getHorizontalExpansion(); + return accuracy + gainBonusFactor * gain - expansionPenaltyFactor * node.getHorizontalExpansion() - nodeChildPenalty * node.getChildren().size(); } private double getAccuracy(int coveredPositives, int coveredNegatives) { return (coveredPositives + nrOfNegativeExamples - coveredNegatives)/(double)nrOfExamples; } + + public static double getNodeScore(ExampleBasedNode node, int nrOfPositiveExamples, int nrOfNegativeExamples) { + MultiHeuristic multi = new 
MultiHeuristic(nrOfPositiveExamples, nrOfNegativeExamples); + return multi.getNodeScore(node); + } + } Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/NodeComparatorStable.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/NodeComparatorStable.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/NodeComparatorStable.java 2008-03-05 18:29:41 UTC (rev 689) @@ -25,44 +25,54 @@ import org.dllearner.utilities.ConceptComparator; /** - * Der Comparator ist stable, weil er nur nach covered negatives, - * Konzeptlänge und Konzeptstring vergleicht, die sich während des Algorithmus nicht - * ändern können. + * This comparator is stable, because it only takes covered examples, concept + * length and the concepts itself (using again a stable comparator) into a + * account, which do not change during the run of the algorithm. * - * @author jl + * @author Jens Lehmann * */ public class NodeComparatorStable implements Comparator<ExampleBasedNode> { - ConceptComparator conceptComparator = new ConceptComparator(); + private ConceptComparator conceptComparator = new ConceptComparator(); - // implementiert public int compare(ExampleBasedNode n1, ExampleBasedNode n2) { - // sicherstellen, dass Qualität ausgewertet wurde + // make sure quality has been evaluated if(n1.isQualityEvaluated() && n2.isQualityEvaluated()) { if(!n1.isTooWeak() && !n2.isTooWeak()) { - if(n1.getCoveredNegativeExamples()<n2.getCoveredNegativeExamples()) + int classificationPointsN1 = n1.getCoveredPositives().size() - n1.getCoveredNegatives().size(); + int classificationPointsN2 = n2.getCoveredPositives().size() - n2.getCoveredNegatives().size(); + + if(classificationPointsN1>classificationPointsN2) return 1; - else if(n1.getCoveredNegativeExamples()>n2.getCoveredNegativeExamples()) + else if(classificationPointsN1<classificationPointsN2) return -1; else { - 
//TODO: es wäre geringfügig effizienter die Länge nicht mehrfach zu berechnen - if(n1.getConcept().getLength()<n2.getConcept().getLength()) + int lengthN1 = n1.getConcept().getLength(); + int lengthN2 = n2.getConcept().getLength(); + + if(lengthN1<lengthN2) return 1; - else if(n1.getConcept().getLength()>n2.getConcept().getLength()) + else if(lengthN1>lengthN2) return -1; else return conceptComparator.compare(n1.getConcept(), n2.getConcept()); } - } else - return conceptComparator.compare(n1.getConcept(), n2.getConcept()); + } else { + if(n1.isTooWeak() && !n2.isTooWeak()) + return -1; + else if(!n1.isTooWeak() && n2.isTooWeak()) + return 1; + else + return conceptComparator.compare(n1.getConcept(), n2.getConcept()); + } } - throw new RuntimeException("Cannot compare nodes, which have no evaluated quality or are too weak."); + throw new RuntimeException("Cannot compare nodes, which have no evaluated quality."); } - // alle NodeComparators führen zur gleichen Ordnung + // all stable node comparators lead to the same order @Override public boolean equals(Object o) { return (o instanceof NodeComparatorStable); Modified: trunk/src/dl-learner/org/dllearner/core/ReasoningService.java =================================================================== --- trunk/src/dl-learner/org/dllearner/core/ReasoningService.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/core/ReasoningService.java 2008-03-05 18:29:41 UTC (rev 689) @@ -559,6 +559,14 @@ return atomicRolesList; } + public String getBaseURI() { + return reasoner.getBaseURI(); + } + + public Map<String, String> getPrefixes() { + return reasoner.getPrefixes(); + } + public long getInstanceCheckReasoningTimeNs() { return instanceCheckReasoningTimeNs; } Modified: trunk/src/dl-learner/org/dllearner/learningproblems/ScoreTwoValued.java =================================================================== --- trunk/src/dl-learner/org/dllearner/learningproblems/ScoreTwoValued.java 2008-03-05 12:07:28 
UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/learningproblems/ScoreTwoValued.java 2008-03-05 18:29:41 UTC (rev 689) @@ -53,6 +53,7 @@ public String toString() { String str = ""; str += "score: " + score + "\n"; + str += "accuracy: " + (1 + classificationScore) + "\n"; str += "posAsPos: " + posAsPos + "\n"; str += "positive examples classified as negative: " + posAsNeg + "\n"; str += "negative examples classified as positive: " + negAsPos + "\n"; Modified: trunk/src/dl-learner/org/dllearner/parser/kb.jj =================================================================== --- trunk/src/dl-learner/org/dllearner/parser/kb.jj 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/parser/kb.jj 2008-03-05 18:29:41 UTC (rev 689) @@ -246,13 +246,14 @@ Description c,c1,c2; NamedClass ac; ObjectProperty ar; + DatatypeProperty dp; String s; int i; } { Top() {return new Thing();} | Bottom() {return new Nothing();} - | ac = AtomicConcept() {return ac;} + | LOOKAHEAD(2) ac = AtomicConcept() {return ac;} // | s=Id() {return new AtomicConcept(s);} // | s=String() {return new AtomicConcept(s);} // Parser geht bis zum n�chsten AND oder OR @@ -278,7 +279,9 @@ // | LE() i=Integer() s=Id() "." c=Concept() // {return new LessEqual(i,new AtomicRole(s),c);} | LE() i=Integer() ar=ObjectProperty() "." 
c=Concept() - {return new ObjectMaxCardinalityRestriction(i,ar,c);} + {return new ObjectMaxCardinalityRestriction(i,ar,c);} + | LOOKAHEAD(4) "(" dp=DatatypeProperty() "IS" "TRUE" ")" { return new BooleanValueRestriction(dp, true); } + | "(" dp=DatatypeProperty() "IS" "FALSE" ")" { return new BooleanValueRestriction(dp, false); } } void Or() : {} { <OR> } Modified: trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/reasoning/FastInstanceChecker.java 2008-03-05 18:29:41 UTC (rev 689) @@ -301,8 +301,14 @@ } @Override - public SortedSet<Individual> retrieval(Description concept) { - return rs.retrieval(concept); + public SortedSet<Individual> retrieval(Description concept) throws ReasoningMethodUnsupportedException { +// return rs.retrieval(concept); + SortedSet<Individual> inds = new TreeSet<Individual>(); + for(Individual i : individuals) { + if(instanceCheck(concept,i)) + inds.add(i); + } + return inds; } /* Modified: trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/reasoning/OWLAPIReasoner.java 2008-03-05 18:29:41 UTC (rev 689) @@ -546,7 +546,8 @@ @Override public SortedSet<Individual> retrieval(Description concept) { - OWLDescription d = getOWLAPIDescription(concept); +// OWLDescription d = getOWLAPIDescription(concept); + OWLDescription d = OWLAPIDescriptionConvertVisitor.getOWLDescription(concept); Set<OWLIndividual> individuals = null; try { individuals = reasoner.getIndividuals(d, false); Modified: trunk/src/dl-learner/org/dllearner/utilities/ConceptComparator.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/utilities/ConceptComparator.java 2008-03-05 12:07:28 UTC (rev 688) +++ trunk/src/dl-learner/org/dllearner/utilities/ConceptComparator.java 2008-03-05 18:29:41 UTC (rev 689) @@ -67,8 +67,35 @@ return ((NamedClass)concept1).getName().compareTo(((NamedClass)concept2).getName()); else return -1; + } else if(concept1 instanceof BooleanValueRestriction) { + if(concept2 instanceof Nothing || concept2 instanceof NamedClass) { + return 1; + } else if(concept2 instanceof BooleanValueRestriction) { + // first criterion: name of the properties + int cmp = rc.compare(((BooleanValueRestriction)concept1).getRestrictedPropertyExpresssion(), ((BooleanValueRestriction)concept2).getRestrictedPropertyExpresssion()); + + // second criterion: value of the properties (it should rarely happen that + // both boolean values are present since this is a contradiction or superfluous) + if(cmp == 0) { + boolean val1 = ((BooleanValueRestriction)concept1).getBooleanValue(); + boolean val2 = ((BooleanValueRestriction)concept2).getBooleanValue(); + if(val1) { + if(val2) + return 0; + else + return 1; + } else { + if(val2) + return -1; + else + return 0; + } + } else + return cmp; + } else + return -1; } else if(concept1 instanceof Thing) { - if(concept2 instanceof Nothing || concept2 instanceof NamedClass) + if(concept2 instanceof Nothing || concept2 instanceof NamedClass || concept2 instanceof BooleanValueRestriction) return 1; else if(concept2 instanceof Thing) return 0; @@ -104,17 +131,6 @@ return roleCompare; } else return -1; - } else if(concept1 instanceof BooleanValueRestriction) { - if(concept2.getChildren().size()<1 || concept2 instanceof Negation || concept2 instanceof ObjectQuantorRestriction) { - return 1; - } else if(concept2 instanceof BooleanValueRestriction) { - int cmp = rc.compare(((BooleanValueRestriction)concept1).getRestrictedPropertyExpresssion(), 
((BooleanValueRestriction)concept2).getRestrictedPropertyExpresssion()); - if(cmp == 0) - return compare(concept1.getChild(0), concept2.getChild(0)); - else - return cmp; - } else - return -1; } else if(concept1 instanceof Intersection) { if(concept2.getChildren().size()<2) return 1; @@ -162,12 +178,6 @@ } else throw new RuntimeException(concept1.toString()); } - - /* - private int compareRole(Role r1, Role r2) { - return r1.toString().compareTo(r2.toString()); - } - */ // TODO: Vergleich zwischen ConceptComparators: immer identisch // (testen, ob das bessere Performance bringt) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <jen...@us...> - 2008-03-11 17:55:29
|
Revision: 701 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=701&view=rev Author: jenslehmann Date: 2008-03-11 10:55:22 -0700 (Tue, 11 Mar 2008) Log Message: ----------- - search tree traversal and candidate set reduction ML techniques implemented - small test example for double datatype learning - PTE2 carcinogenesis examples added Modified Paths: -------------- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java Added Paths: ----------- trunk/examples/datatypes/ trunk/examples/datatypes/double.conf trunk/examples/datatypes/double.owl Added: trunk/examples/datatypes/double.conf =================================================================== --- trunk/examples/datatypes/double.conf (rev 0) +++ trunk/examples/datatypes/double.conf 2008-03-11 17:55:22 UTC (rev 701) @@ -0,0 +1,20 @@ +/** + * Simple test example for double datatypes using the height + * of persons. 
+ * + * possible solution: + * height <= 1.84 + * + * Copyright (C) 2008, Jens Lehmann + */ + +algorithm = refexamples; +// refexamples.writeSearchTree = true; +refexamples.searchTreeFile = "log/doubleTree.txt"; +reasoner = fastInstanceChecker; + +import("double.owl"); + ++"http://dl-learner.org/examples/double#frank" +-"http://dl-learner.org/examples/double#peter" +-"http://dl-learner.org/examples/double#susan" Added: trunk/examples/datatypes/double.owl =================================================================== --- trunk/examples/datatypes/double.owl (rev 0) +++ trunk/examples/datatypes/double.owl 2008-03-11 17:55:22 UTC (rev 701) @@ -0,0 +1,121 @@ +<?xml version="1.0"?> + + +<!DOCTYPE rdf:RDF [ + <!ENTITY owl "http://www.w3.org/2002/07/owl#" > + <!ENTITY owl11 "http://www.w3.org/2006/12/owl11#" > + <!ENTITY xsd "http://www.w3.org/2001/XMLSchema#" > + <!ENTITY owl11xml "http://www.w3.org/2006/12/owl11-xml#" > + <!ENTITY rdfs "http://www.w3.org/2000/01/rdf-schema#" > + <!ENTITY double "http://dl-learner.org/examples/double#" > + <!ENTITY rdf "http://www.w3.org/1999/02/22-rdf-syntax-ns#" > +]> + + +<rdf:RDF xmlns="http://dl-learner.org/examples/double#" + xml:base="http://dl-learner.org/examples/double" + xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" + xmlns:owl11="http://www.w3.org/2006/12/owl11#" + xmlns:owl11xml="http://www.w3.org/2006/12/owl11-xml#" + xmlns:owl="http://www.w3.org/2002/07/owl#" + xmlns:xsd="http://www.w3.org/2001/XMLSchema#" + xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" + xmlns:double="http://dl-learner.org/examples/double#"> + <owl:Ontology rdf:about=""/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Data properties + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://dl-learner.org/examples/double#height --> + + <owl:DatatypeProperty rdf:about="#height"> + <rdfs:domain 
rdf:resource="#Person"/> + <rdfs:range rdf:resource="&xsd;double"/> + </owl:DatatypeProperty> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Classes + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://dl-learner.org/examples/double#Man --> + + <owl:Class rdf:about="#Man"> + <rdfs:subClassOf rdf:resource="#Person"/> + </owl:Class> + + + + <!-- http://dl-learner.org/examples/double#Person --> + + <owl:Class rdf:about="#Person"> + <rdfs:subClassOf rdf:resource="&owl;Thing"/> + </owl:Class> + + + + <!-- http://dl-learner.org/examples/double#Woman --> + + <owl:Class rdf:about="#Woman"> + <rdfs:subClassOf rdf:resource="#Person"/> + </owl:Class> + + + + <!-- http://www.w3.org/2002/07/owl#Thing --> + + <owl:Class rdf:about="&owl;Thing"/> + + + + <!-- + /////////////////////////////////////////////////////////////////////////////////////// + // + // Individuals + // + /////////////////////////////////////////////////////////////////////////////////////// + --> + + + + + <!-- http://dl-learner.org/examples/double#frank --> + + <Man rdf:about="#frank"> + <height rdf:datatype="&xsd;double">1.82</height> + </Man> + + + + <!-- http://dl-learner.org/examples/double#peter --> + + <Man rdf:about="#peter"> + <height rdf:datatype="&xsd;double">1.91</height> + </Man> + + + + <!-- http://dl-learner.org/examples/double#susan --> + + <Woman rdf:about="#susan"> + <height rdf:datatype="&xsd;double">1.86</height> + </Woman> +</rdf:RDF> Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java 2008-03-10 17:53:52 UTC (rev 700) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedNode.java 2008-03-11 17:55:22 UTC (rev 701) @@ -174,6 +174,10 @@ return ret; } 
+ public double getAccuracy(int nrOfPositiveExamples, int nrOfNegativeExamples) { + return (coveredPositives.size() + nrOfNegativeExamples - coveredNegatives.size())/(double)(nrOfPositiveExamples+nrOfNegativeExamples); + } + public Set<Individual> getCoveredPositives() { return coveredPositives; } Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java =================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-03-10 17:53:52 UTC (rev 700) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/ExampleBasedROLearner.java 2008-03-11 17:55:22 UTC (rev 701) @@ -25,6 +25,7 @@ import java.util.Iterator; import java.util.LinkedList; import java.util.List; +import java.util.NavigableSet; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -106,6 +107,18 @@ // ALL r.TOP is improper and automatically expanded further private boolean testProperness = false; + // tree traversal means to run through the most promising concepts + // and connect them in an intersection to find a solution + // (this is called irregularly e.g. 
every 100 seconds) + private boolean useTreeTraversal = false; + + // candidate reduction: using this mechanism we can simulate + // the divide&conquer approach in many ILP programs using a + // clause by clause search; after a period of time the candidate + // set is reduced to focus CPU time on the most promising concepts + private boolean useCandidateReduction = true; + private int candidatePostReductionSize = 30; + // setting to true gracefully stops the algorithm private boolean stop = false; @@ -228,7 +241,7 @@ public void start() { // calculate quality threshold required for a solution - allowedMisclassifications = (int) Math.round(noise * nrOfExamples); + allowedMisclassifications = (int) Math.round(noise * nrOfExamples); // start search with start class if(startDescription == null) { @@ -250,6 +263,11 @@ algorithmStartTime = System.nanoTime(); long lastPrintTime = 0; + long lastTreeTraversalTime = System.nanoTime(); + long lastReductionTime = System.nanoTime(); + // try a traversal after 100 seconds + long traversalInterval = 1000l * 1000000000l; + long reductionInterval = 100l * 1000000000l; long currentTime; while(!solutionFound && !stop) { @@ -262,6 +280,18 @@ logger.debug("--- loop " + loop + " started ---"); } + // traverse the current search tree to find a solution + if(useTreeTraversal && (currentTime - lastTreeTraversalTime > traversalInterval)) { + traverseTree(); + lastTreeTraversalTime = System.nanoTime(); + } + + // reduce candidates to focus on promising concepts + if(useCandidateReduction && (currentTime - lastReductionTime > reductionInterval)) { + reduceCandidates(); + lastReductionTime = System.nanoTime(); + } + // chose best node according to heuristics bestNode = candidates.last(); // extend best node @@ -695,6 +725,130 @@ } return false; } + + // TODO: investigate whether it makes sense not to store all individuals + // in the nodes, but instead perform instance checks in tree traversal + // (it is only run in large intervals so it 
shouldn't be too expensive) + private void traverseTree() { +// ExampleBasedNode startNode = candidatesStable.last(); + ExampleBasedNode startNode = findBestTraversalStartNode(); + Description currentDescription = startNode.getConcept(); + Set<Individual> currentCoveredPos = startNode.getCoveredPositives(); + Set<Individual> currentCoveredNeg = startNode.getCoveredNegatives(); + double currentAccuracy = startNode.getAccuracy(nrOfPositiveExamples, nrOfNegativeExamples); + int currentMisclassifications = nrOfPositiveExamples - currentCoveredPos.size() + currentCoveredNeg.size(); + System.out.println("tree traversal start node " + startNode.getShortDescription(nrOfPositiveExamples, nrOfNegativeExamples, baseURI)); + System.out.println("tree traversal start accuracy: " + currentAccuracy); + int i=0; + // start from the most promising nodes + NavigableSet<ExampleBasedNode> reverseView = candidatesStable.descendingSet(); + for(ExampleBasedNode currNode : reverseView) { + // compute covered positives and negatives + SortedSet<Individual> newCoveredPositives = new TreeSet<Individual>(currentCoveredPos); + newCoveredPositives.retainAll(currNode.getCoveredPositives()); + SortedSet<Individual> newCoveredNegatives = new TreeSet<Individual>(currentCoveredNeg); + newCoveredNegatives.retainAll(currNode.getCoveredNegatives()); + + // compute the accuracy we would get by adding this node + double accuracy = (newCoveredPositives.size() + nrOfNegativeExamples - newCoveredNegatives.size())/(double)(nrOfPositiveExamples+nrOfNegativeExamples); + int misclassifications = nrOfPositiveExamples - newCoveredPositives.size() + newCoveredNegatives.size(); + int misclassifiedPositives = nrOfPositiveExamples - newCoveredPositives.size(); + + int lostPositives = currentCoveredPos.size() - newCoveredPositives.size(); + + // TODO: maybe we should also consider a minimum improvement when adding something + // otherwise we could overfit + // we give double weith to lost positives, i.e. 
when one positive is lost at least + // two negatives need to be uncovered + boolean consider = (misclassifications + lostPositives < currentMisclassifications) && + (misclassifiedPositives <= allowedMisclassifications); +// boolean consider = (misclassifications < currentMisclassifications) && +// (misclassifiedPositives <= allowedMisclassifications); + + // concept has been chosen, so construct it + if(consider) { + + // construct a new concept as intersection of both + Intersection mc = new Intersection(currentDescription, currNode.getConcept()); + + ConceptTransformation.cleanConceptNonRecursive(mc); + ConceptTransformation.transformToOrderedNegationNormalFormNonRecursive(mc, conceptComparator); + +// System.out.println("extended concept to: " + mc); + System.out.println("misclassifications: " + misclassifications); + System.out.println("misclassified positives: " + misclassifiedPositives); + System.out.println("accuracy: " + accuracy); + + // update variables + currentDescription = mc; + currentCoveredPos = newCoveredPositives; + currentCoveredNeg = newCoveredNegatives; + currentMisclassifications = misclassifications; + currentAccuracy = accuracy; + + if(accuracy > 1 - noise) { + System.out.println("traversal found " + mc); + System.out.println("accuracy: " + accuracy); + System.exit(0); + } + } + + i++; + if(i==1000) + break; + } + + } + + // we look for a node covering many positives and hopefully + // few negatives; we give a strong penalty on uncovered positives + private ExampleBasedNode findBestTraversalStartNode() { + // 2 points for each covered pos + 1 point for each uncovered neg + int currScore = 0; + int i = 0; + ExampleBasedNode currNode = null; + NavigableSet<ExampleBasedNode> reverseView = candidatesStable.descendingSet(); + for(ExampleBasedNode node : reverseView) { + int score = 2 * node.getCoveredPositives().size() + (nrOfNegativeExamples - node.getCoveredNegatives().size()); + if(score > currScore) { + currScore = score; + currNode = node; 
+ } + i++; + // limit search because stable candidate set can grow very large + if(i == 10000) + break; + } + return currNode; + } + + private void reduceCandidates() { + Iterator<ExampleBasedNode> it = candidatesStable.descendingIterator(); + Set<ExampleBasedNode> promisingNodes = new HashSet<ExampleBasedNode>(); + int i = 0; + while(it.hasNext() && promisingNodes.size()<candidatePostReductionSize) { + ExampleBasedNode node = it.next(); +// System.out.println(node.getShortDescription(nrOfPositiveExamples, nrOfNegativeExamples, baseURI)); + // first criterion: the considered node should have an accuracy gain over its parent + // (avoids to use only the most promising node + all its refinements with equal accuracy) + boolean hasAccuracyGain = (node.getParent() == null) || (node.getCoveredPositives().size() != node.getParent().getCoveredPositives().size()) + || (node.getCoveredNegatives().size() != node.getParent().getCoveredNegatives().size()); + // second criterion: uncovered positives; it does not make much sense to pick nodes with + // low potential for reaching a solution (already at the limit of misclassified positives) + int misclassifiedPositives = nrOfPositiveExamples - node.getCoveredPositives().size(); + boolean hasRefinementPotential = (misclassifiedPositives <= Math.floor(0.65d*allowedMisclassifications)); + boolean keep = hasAccuracyGain && hasRefinementPotential; + if(keep) { + promisingNodes.add(node); + } + i++; + } + candidates.retainAll(promisingNodes); + System.out.println("searched " + i + " nodes and picked the following promising descriptions:"); + for(ExampleBasedNode node : promisingNodes) + System.out.println(node.getShortDescription(nrOfPositiveExamples, nrOfNegativeExamples, baseURI)); + } + /* private Set<Individual> computeQuality(Description refinement, Set<Individual> coveredPositives) { Set<Individual> ret = new TreeSet<Individual>(); Modified: trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java 
=================================================================== --- trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java 2008-03-10 17:53:52 UTC (rev 700) +++ trunk/src/dl-learner/org/dllearner/algorithms/refexamples/MultiHeuristic.java 2008-03-11 17:55:22 UTC (rev 701) @@ -21,8 +21,9 @@ import java.util.List; -import org.dllearner.core.owl.BooleanValueRestriction; +import org.dllearner.core.owl.DatatypeValueRestriction; import org.dllearner.core.owl.Description; +import org.dllearner.core.owl.Thing; import org.dllearner.utilities.ConceptComparator; /** @@ -77,7 +78,7 @@ private double expansionPenaltyFactor; private double gainBonusFactor; private double nodeChildPenalty = 0.0001; - private double startNodeBonus = 0.1; + private double startNodeBonus = 0.8; // examples private int nrOfNegativeExamples; @@ -140,9 +141,15 @@ private static int getHeuristicLengthBonus(Description description) { int bonus = 0; - if(description instanceof BooleanValueRestriction) + // do not count TOP symbols (in particular in ALL r.TOP and EXISTS r.TOP) + // as they provide no extra information + if(description instanceof Thing) bonus = 1; + // some bonus for doubles because they are already penalised by length 3 + if(description instanceof DatatypeValueRestriction) + bonus = 1; + List<Description> children = description.getChildren(); for(Description child : children) { bonus += getHeuristicLengthBonus(child); Modified: trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java =================================================================== --- trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java 2008-03-10 17:53:52 UTC (rev 700) +++ trunk/src/dl-learner/org/dllearner/examples/Carcinogenesis.java 2008-03-11 17:55:22 UTC (rev 701) @@ -122,8 +122,10 @@ public static void main(String[] args) throws FileNotFoundException, IOException, ParseException { + // TODO: newgroups are not mapped currently String[] files = new String[] { 
"newgroups.pl", "ames.pl", "atoms.pl", "bonds.pl", "gentoxprops.pl", - "ind_nos.pl", "ind_pos.pl", + "ind_nos.pl", "ind_pos.pl", "pte2/canc_nos.pl", "pte2/pte2ames.pl", "pte2/pte2atoms.pl", + "pte2/pte2bonds.pl", "pte2/pte2gentox.pl", "pte2/pte2ind_nos.pl", "pte2/pte2newgroups.pl" // "train.b" => not a pure Prolog file but Progol/Aleph specific }; File owlFile = new File("examples/carcinogenesis/pte.owl"); @@ -252,20 +254,23 @@ appendNegExamples(confTrainFile, negTrainExamples); // generating test examples for PTE-1 - File confPTE1File = new File("examples/carcinogenesis/testpte1.conf"); - Files.clearFile(confPTE1File); + // => put all in one file, because they were used as training for PTE-2 + // File confPTE1File = new File("examples/carcinogenesis/testpte1.conf"); + // Files.clearFile(confPTE1File); File testPTE1Positives = new File(prologDirectory + "pte1.f"); File testPTE1Negatives = new File(prologDirectory + "pte1.n"); List<Individual> posPTE1Examples = getExamples(testPTE1Positives); List<Individual> negPTE1Examples = getExamples(testPTE1Negatives); - appendPosExamples(confPTE1File, posPTE1Examples); - appendNegExamples(confPTE1File, negPTE1Examples); + appendPosExamples(confTrainFile, posPTE1Examples); + appendNegExamples(confTrainFile, negPTE1Examples); - // TODO: how to get PTE-2 predictions? 
the pte-2 directory suggests - // that all are positive which is not true (according to the papers) - // solution: go to "http://ntp-server.niehs.nih.gov/" and click - // on "Testing Status of Agents at NTP" + // create a PTE-2 test file + File confPTE2File = new File("examples/carcinogenesis/testpte2.conf"); + Files.clearFile(confPTE2File); + Files.appendFile(confPTE2File, "import(\"pte.owl\");\nreasoner=fastInstanceChecker;\n\n"); + Files.appendFile(confPTE2File, getPTE2Examples()); + } private static List<Axiom> mapClause(Clause clause) throws IOException, ParseException { @@ -361,7 +366,7 @@ axioms.add(dpa); // either parse this or ashby_alert - not both - ashby_alert contains // all information in ind already - } else if (headName.equals("ind")) { + } else if (headName.equals("ind") || headName.equals("ring_no")) { String compoundName = head.getArgument(0).toPLString(); String structureName = head.getArgument(1).toPLString(); int count = Integer.parseInt(head.getArgument(2).toPLString()); @@ -555,4 +560,86 @@ newGroups.addAll(list); } + /** + * <p>To find out whether a substance is carinogenetic go to + * "http://ntp-server.niehs.nih.gov/" and click + * on "Testing Status of Agents at NTP".</p> + * + * Levels: + * <ul> + * <li>CE = clear evidence</li> + * <li>SE = some evidence</li> + * <li>E = equivocal evidence</li> + * <li>NE = no evidence</li> + * </ul> + * Levels CE and SE are positive examples. E and NE negative examples. + * Experiments are performed on rats and mice of both genders, so we + * have four evidence values. An example is positive if at least one + * value is SE or CE. 
+ * + * <p>Some values are taken from the IJCAI-97 paper of Muggleton.</p> + * + * <p>Positives (19): <br /> + * <ul> + * <li>t3 (SE+3NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCACAFD4-123F-7908-7B521E4F665EFBD9</li> + * <li>t5: paper</li> + * <li>t7: paper</li> + * <li>t8: paper</li> + * <li>t9 (3CE+SE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD7C6869-123F-7908-7BDEA4CFAA55CEA8</li> + * <li>t10: paper</li> + * <li>t12 (2SE+E+NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCB0ADE0-123F-7908-7BEC101C7309C4DE</li> + * <li>t14 (2CE+2NE) probably 111-42-2 instead of 11-42-2: http://ntp.niehs.nih.gov/index.cfm?objectid=BCC60FF1-123F-7908-7B2D579AA48DE90C</li> + * <li>t15: paper</li> + * <li>t16 (2CE+SE+E): http://ntp.niehs.nih.gov/index.cfm?objectid=BCC5D9CE-123F-7908-7B959CCE5262468A</li> + * <li>t18 (2SE+E+NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCA087AA-123F-7908-7B79FDFDE3CDCF87</li> + * <li>t19 (2CE+E+NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCAE5690-123F-7908-7B02E35E2BB57694</li> + * <li>t20 (2SE+E+NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCF95607-123F-7908-7B0761D3C515CC12</li> + * <li>t21 (CE+3NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCFCB63C-123F-7908-7BF910C2783AE9FE</li> + * <li>t22 (SE+3NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD8345C2-123F-7908-7BC52FEF80F110E1</li> + * <li>t23 (4CE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCADD2D9-123F-7908-7B5C8180FE80B22F</li> + * <li>t24 (CE+E): http://ntp.niehs.nih.gov/index.cfm?objectid=BCFB19FF-123F-7908-7B845E176F13E6E1</li> + * <li>t25 (3CE+SE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD2D2A62-123F-7908-7B0DA824E782754C</li> + * <li>t30 (2CE+SE+E) : http://ntp.niehs.nih.gov/index.cfm?objectid=BCB13734-123F-7908-7BEBA533E35A48B7</li> + * </ul> + * </p> + * + * <p>Negatives (10): + * <ul> + * <li>t1 (4NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD9FF53C-123F-7908-7B123DAE0A25B122 </li> + * <li>t2 (4NE): 
http://ntp.niehs.nih.gov/index.cfm?objectid=BCF8651E-123F-7908-7B21DD5ED83CD0FF </li> + * <li>t4: paper</li> + * <li>t6: paper</li> + * <li>t11: paper</li> + * <li>t13 (4NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD136ED6-123F-7908-7B619EE79F2FD062</li> + * <li>t17: paper</li> + * <li>t26 (2E+2NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD1E6209-123F-7908-7B95EB8BAE662CE7</li> + * <li>t27 (E+3NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BCAC5D00-123F-7908-7BC46ECB72A6C91B</li> + * <li>t28 (E+3NE): http://ntp.niehs.nih.gov/index.cfm?objectid=BD34E02A-123F-7908-7BC6791917B591DF</li> + * </ul> + * </p> + * + * <p>Unclear (1): + * <ul> + * <li>t29: probably a negative (see http://ntp.niehs.nih.gov/index.cfm?objectid=BD855EA1-123F-7908-7B573FC3C08188DC) but + * no tests directly for this substance</li> + * </ul> + * </p> + * @return A string for all examples as used in the conf file. + */ + public static String getPTE2Examples() { + String[] pos = new String[] {"t3","t5","t7","t8","t9","t10","t12", + "t14","t15","t16","t18","t19","t20","t21","t22","t23","t24", + "t25","t30"}; + String[] neg = new String[] {"t1", "t2", "t4", "t6", "t11", "t13", + "t17","t26","t27","t28"}; + + String ret = ""; + for(String posEx : pos) + ret += "+" + getURI2(posEx) + "\n"; + for(String negEx : neg) + ret += "-" + getURI2(negEx) + "\n"; + + return ret; + } + } Modified: trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java =================================================================== --- trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java 2008-03-10 17:53:52 UTC (rev 700) +++ trunk/src/dl-learner/org/dllearner/refinementoperators/RhoDRDown.java 2008-03-11 17:55:22 UTC (rev 701) @@ -127,7 +127,7 @@ // splits for double datatype properties in ascening order private Map<DatatypeProperty,List<Double>> splits = new TreeMap<DatatypeProperty,List<Double>>(); - private int maxNrOfSplits = 20; + private int maxNrOfSplits = 10; // staistics 
public long mComputationTimeNs = 0; @@ -359,6 +359,7 @@ } } else if (description instanceof DatatypeSomeRestriction) { + DatatypeSomeRestriction dsr = (DatatypeSomeRestriction) description; DatatypeProperty dp = (DatatypeProperty) dsr.getRestrictedPropertyExpression(); DataRange dr = dsr.getDataRange(); @@ -956,7 +957,7 @@ int nrOfValues = values.size(); // create split set List<Double> splitsDP = new LinkedList<Double>(); - for(int splitNr=0; splitNr < maxNrOfSplits; splitNr++) { + for(int splitNr=0; splitNr < Math.min(maxNrOfSplits,nrOfValues-1); splitNr++) { int index; if(nrOfValues<=maxNrOfSplits) index = splitNr; This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ku...@us...> - 2008-03-12 19:25:51
|
Revision: 705 http://dl-learner.svn.sourceforge.net/dl-learner/?rev=705&view=rev Author: kurzum Date: 2008-03-12 12:25:45 -0700 (Wed, 12 Mar 2008) Log Message: ----------- KRK not completed Added Paths: ----------- trunk/examples/krk/ trunk/src/dl-learner/org/dllearner/examples/KRK.java Added: trunk/src/dl-learner/org/dllearner/examples/KRK.java =================================================================== --- trunk/src/dl-learner/org/dllearner/examples/KRK.java (rev 0) +++ trunk/src/dl-learner/org/dllearner/examples/KRK.java 2008-03-12 19:25:45 UTC (rev 705) @@ -0,0 +1,213 @@ +package org.dllearner.examples; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.net.URI; +import java.util.HashMap; +import java.util.Iterator; +import java.util.SortedSet; +import java.util.StringTokenizer; +import java.util.TreeSet; + +import org.dllearner.core.owl.ClassAssertionAxiom; +import org.dllearner.core.owl.DatatypeProperty; +import org.dllearner.core.owl.Individual; +import org.dllearner.core.owl.KB; +import org.dllearner.core.owl.NamedClass; +import org.dllearner.core.owl.ObjectProperty; +import org.dllearner.core.owl.ObjectPropertyAssertion; +import org.dllearner.core.owl.TransitiveObjectPropertyAxiom; +import org.dllearner.reasoning.OWLAPIReasoner; + +public class KRK { + + // REMEMBER + // FILES are letters + // RANKS are numbers + + private static URI ontologyURI = URI.create("http://www.test.de/test"); + static SortedSet<String> fileSet; + static SortedSet<String> rankSet; + static SortedSet<String> classSet; + static HashMap<String,SortedSet<String>> classToInd; + /** + * @param args + */ + public static void main(String[] args) { + + classToInd = new HashMap<String,SortedSet<String>>(); + fileSet = new TreeSet<String>(); + rankSet = new TreeSet<String>(); + classSet = new TreeSet<String>(); + KB kb=new KB(); + + + fileSet.add("a"); fileSet.add("b"); fileSet.add("c"); 
fileSet.add("d"); fileSet.add("e"); fileSet.add("f"); fileSet.add("g"); fileSet.add("h"); + + for (int count = 1; count < 9; count++) { + rankSet.add("f"+count); + } + + NamedClass[] nc=new NamedClass[]{ + getAtomicConcept("Game"), + getAtomicConcept("WKing"), + getAtomicConcept("WRook"), + getAtomicConcept("BKing")}; + + ObjectProperty rank= getRole("hasRank"); + ObjectProperty file= getRole("hasFile"); + ObjectProperty piece= getRole("hasPiece"); + ObjectProperty lessThan= getRole("strictLessThan"); + //ObjectProperty rank= getRole("hasRank"); + + Individual game; + Individual wking; + Individual wrook; + Individual bking; + + kb.addRBoxAxiom(new TransitiveObjectPropertyAxiom(lessThan)); + + Iterator<String> it = fileSet.iterator(); + Individual current = getIndividual(it.next()); + Individual next; + while (it.hasNext()){ + next=getIndividual(it.next()); + kb.addABoxAxiom(new ObjectPropertyAssertion(lessThan,current,next)); + current=next; + + } + + it = rankSet.iterator(); + current = getIndividual(it.next()); + next=null; + while (it.hasNext()){ + next=getIndividual(it.next()); + kb.addABoxAxiom(new ObjectPropertyAssertion(lessThan,current,next)); + current=next; + + } + + + String fileIn = "examples/krk/krkopt.data"; + + // Datei öffnen + BufferedReader in = null; + try { + in = new BufferedReader(new FileReader(fileIn)); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + + try{ + String line = ""; + String[] ar = new String[6]; + + + int x=0; + while ( (line =in.readLine()) != null) + { x++; + //if(x % 3000 == 0 ) System.out.println("Currently at line "+x); + ar = tokenize(line); + game = getIndividual("game"+x); + wking = getIndividual("wking"+x); + wrook = getIndividual("wrook"+x); + bking = getIndividual("bking"+x); + + classSet.add(ar[6]); + + //ar[0]); + + // CLASSES + kb.addABoxAxiom(new ClassAssertionAxiom(nc[0],game)); + kb.addABoxAxiom(new ClassAssertionAxiom(getAtomicConcept(ar[6]),game)); + kb.addABoxAxiom(new 
ClassAssertionAxiom(nc[1],wking)); + kb.addABoxAxiom(new ClassAssertionAxiom(nc[2],wrook)); + kb.addABoxAxiom(new ClassAssertionAxiom(nc[3],bking)); + + //PROPERTIES + kb.addABoxAxiom(new ObjectPropertyAssertion(piece,game,wking)); + kb.addABoxAxiom(new ObjectPropertyAssertion(piece,game,wrook)); + kb.addABoxAxiom(new ObjectPropertyAssertion(piece,game,bking)); + + + kb.addABoxAxiom(new ObjectPropertyAssertion(rank,wking,getIndividual(ar[0]))); + kb.addABoxAxiom(new ObjectPropertyAssertion(file,wking,getIndividual("f"+ar[1]))); + + kb.addABoxAxiom(new ObjectPropertyAssertion(rank,wrook,getIndividual(ar[2]))); + kb.addABoxAxiom(new ObjectPropertyAssertion(file,wrook,getIndividual("f"+ar[3]))); + + kb.addABoxAxiom(new ObjectPropertyAssertion(rank,bking,getIndividual(ar[4]))); + kb.addABoxAxiom(new ObjectPropertyAssertion(file,bking,getIndividual("f"+ar[5]))); + + //kb.addABoxAxiom(new ClassAssertionAxiom(new NamedClass("Game"),new Individual(names[0]+(x++)))); + //kb.addABoxAxiom(new ClassAssertionAxiom(new NamedClass("Game"),new Individual(names[0]+(x++)))); + + + + //System.out.println(line); + + } + System.out.println("Writing owl"); + File owlfile = new File("examples/krk/test.owl"); + //System.out.println(kb.toString("http://www.test.de/test", new HashMap<String, String>())); + OWLAPIReasoner.exportKBToOWL(owlfile, kb, ontologyURI); + + }catch (Exception e) {e.printStackTrace();} + System.out.println("Done"); + } + + public static String[] tokenize (String s) { + StringTokenizer st=new StringTokenizer(s,","); + + String tmp=""; + String[] ret = new String[7]; + int x = 0; + while (st.hasMoreTokens()){ + tmp=st.nextToken(); + if(x==6)tmp=tmp.toUpperCase(); + ret[x] = tmp; + x++; + } + return ret; + + } + + private static Individual getIndividual(String name) { + return new Individual(ontologyURI + "#" + name); + } + + private static ObjectProperty getRole(String name) { + return new ObjectProperty(ontologyURI + "#" + name); + } + + @SuppressWarnings("unused") + 
private static DatatypeProperty getDatatypeProperty(String name) { + return new DatatypeProperty(ontologyURI + "#" + name); + } + + private static NamedClass getAtomicConcept(String name) { + return new NamedClass(ontologyURI + "#" + name); + } + + @SuppressWarnings("unused") + private static String getURI(String name) { + return ontologyURI + "#" + name; + } + + @SuppressWarnings("unused") + private static ClassAssertionAxiom getConceptAssertion(String concept, String i) { + Individual ind = getIndividual(i); + NamedClass c = getAtomicConcept(concept); + return new ClassAssertionAxiom(c, ind); + } + + @SuppressWarnings("unused") + private static ObjectPropertyAssertion getRoleAssertion(String role, String i1, String i2) { + Individual ind1 = getIndividual(i1); + Individual ind2 = getIndividual(i2); + ObjectProperty ar = getRole(role); + return new ObjectPropertyAssertion(ar, ind1, ind2); + } +} This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |